diff --git a/docs/developer/advanced/running-elasticsearch.asciidoc b/docs/developer/advanced/running-elasticsearch.asciidoc index 2361f805c7635..e5c86fafd1ce7 100644 --- a/docs/developer/advanced/running-elasticsearch.asciidoc +++ b/docs/developer/advanced/running-elasticsearch.asciidoc @@ -13,6 +13,10 @@ This will run a snapshot of {es} that is usually built nightly. Read more about ---- yarn es snapshot ---- +By default, two users are added to Elasticsearch: + + - A superuser with username: `elastic` and password: `changeme`, which can be used to log in to Kibana. + - A user with username: `kibana_system` and password: `changeme`. This account is used by the Kibana server to authenticate itself to Elasticsearch, and to perform certain actions on behalf of the end user. These credentials should be specified in your `kibana.yml` as described in <>. See all available options, like how to specify a specific license, with the `--help` flag. @@ -115,4 +119,4 @@ PUT _cluster/settings } ---- -Follow the cross-cluster search instructions for setting up index patterns to search across clusters (<>). \ No newline at end of file +Follow the cross-cluster search instructions for setting up index patterns to search across clusters (<>). diff --git a/docs/maps/trouble-shooting.asciidoc b/docs/maps/trouble-shooting.asciidoc index cfc47cf6f0e4f..1c53fbd55ea4b 100644 --- a/docs/maps/trouble-shooting.asciidoc +++ b/docs/maps/trouble-shooting.asciidoc @@ -20,6 +20,20 @@ image::maps/images/inspector.png[] [float] === Solutions to common problems +[float] +==== Index not listed when adding a layer + +* Verify your geospatial data is correctly mapped as {ref}/geo-point.html[geo_point] or {ref}/geo-shape.html[geo_shape]. + ** Run `GET myIndexPatternTitle/_field_caps?fields=myGeoFieldName` in <>, replacing `myIndexPatternTitle` and `myGeoFieldName` with your index pattern title and geospatial field name. + ** Ensure the response specifies `type` as `geo_point` or `geo_shape` (see the example response after this list). +* Verify your geospatial data is correctly mapped in your <>. + ** Open your index pattern in <>. + ** Ensure your geospatial field type is `geo_point` or `geo_shape`. + ** Ensure your geospatial field is searchable and aggregatable. + ** If your geospatial field type does not match your Elasticsearch mapping, click the *Refresh* button to refresh the field list from Elasticsearch. +* Index patterns with thousands of fields can exceed the default maximum payload size. +Increase <> for large index patterns. 
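+
+For example, a correctly mapped `geo_point` field might produce a `_field_caps` response similar to the following. The index name (`my_locations`) and field name (`location`) are illustrative placeholders:
+
+[source,js]
+----
+{
+  "indices": ["my_locations"],
+  "fields": {
+    "location": {
+      "geo_point": {
+        "type": "geo_point",
+        "searchable": true,
+        "aggregatable": true
+      }
+    }
+  }
+}
+----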
+ [float] ==== Features are not displayed diff --git a/package.json b/package.json index 0d6bc8cc1fceb..594f0ce583987 100644 --- a/package.json +++ b/package.json @@ -317,6 +317,7 @@ "@types/accept": "3.1.1", "@types/angular": "^1.6.56", "@types/angular-mocks": "^1.7.0", + "@types/archiver": "^3.1.0", "@types/babel__core": "^7.1.2", "@types/bluebird": "^3.1.1", "@types/boom": "^7.2.0", @@ -398,6 +399,7 @@ "@types/testing-library__react-hooks": "^3.1.0", "@types/type-detect": "^4.0.1", "@types/uuid": "^3.4.4", + "@types/vinyl": "^2.0.4", "@types/vinyl-fs": "^2.4.11", "@types/zen-observable": "^0.8.0", "@typescript-eslint/eslint-plugin": "^2.34.0", @@ -474,6 +476,7 @@ "license-checker": "^16.0.0", "listr": "^0.14.1", "load-grunt-config": "^3.0.1", + "load-json-file": "^6.2.0", "mocha": "^7.1.1", "mock-fs": "^4.12.0", "mock-http-server": "1.3.0", diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json index b307bd41bb4dd..83a7a7607816c 100644 --- a/packages/kbn-dev-utils/package.json +++ b/packages/kbn-dev-utils/package.json @@ -20,6 +20,7 @@ "normalize-path": "^3.0.0", "moment": "^2.24.0", "rxjs": "^6.5.5", + "strip-ansi": "^6.0.0", "tree-kill": "^1.2.2", "tslib": "^2.0.0" }, diff --git a/packages/kbn-dev-utils/src/index.ts b/packages/kbn-dev-utils/src/index.ts index 582526f939e42..798746d159f60 100644 --- a/packages/kbn-dev-utils/src/index.ts +++ b/packages/kbn-dev-utils/src/index.ts @@ -19,7 +19,7 @@ export { withProcRunner, ProcRunner } from './proc_runner'; export * from './tooling_log'; -export { createAbsolutePathSerializer } from './serializers'; +export * from './serializers'; export { CA_CERT_PATH, ES_KEY_PATH, diff --git a/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts b/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts index af55622c76198..884614c8b9551 100644 --- a/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts +++ b/packages/kbn-dev-utils/src/serializers/absolute_path_serializer.ts @@ -21,7 +21,7 @@ import { REPO_ROOT } from '../repo_root'; export function createAbsolutePathSerializer(rootPath: string = REPO_ROOT) { return { - serialize: (value: string) => value.replace(rootPath, '').replace(/\\/g, '/'), test: (value: any) => typeof value === 'string' && value.startsWith(rootPath), + serialize: (value: string) => value.replace(rootPath, '').replace(/\\/g, '/'), }; } diff --git a/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts b/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts new file mode 100644 index 0000000000000..c5cc095e9ee82 --- /dev/null +++ b/packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export function createAnyInstanceSerializer(Class: Function, name?: string) { + return { + test: (v: any) => v instanceof Class, + serialize: () => `<${name ?? Class.name}>`, + }; +} diff --git a/packages/kbn-dev-utils/src/serializers/index.ts b/packages/kbn-dev-utils/src/serializers/index.ts index 3b49e243058df..e645a3be3fe5d 100644 --- a/packages/kbn-dev-utils/src/serializers/index.ts +++ b/packages/kbn-dev-utils/src/serializers/index.ts @@ -17,4 +17,7 @@ * under the License. */ -export { createAbsolutePathSerializer } from './absolute_path_serializer'; +export * from './absolute_path_serializer'; +export * from './strip_ansi_serializer'; +export * from './recursive_serializer'; +export * from './any_instance_serizlizer'; diff --git a/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts b/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts new file mode 100644 index 0000000000000..537ae4972c842 --- /dev/null +++ b/packages/kbn-dev-utils/src/serializers/recursive_serializer.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export function createRecursiveSerializer(test: (v: any) => boolean, print: (v: any) => string) { + return { + test: (v: any) => test(v), + serialize: (v: any, ...rest: any[]) => { + const replacement = print(v); + const printer = rest.pop()!; + return printer(replacement, ...rest); + }, + }; +} diff --git a/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts b/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts new file mode 100644 index 0000000000000..4a2151c06f34f --- /dev/null +++ b/packages/kbn-dev-utils/src/serializers/strip_ansi_serializer.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import stripAnsi from 'strip-ansi'; + +import { createRecursiveSerializer } from './recursive_serializer'; + +export function createStripAnsiSerializer() { + return createRecursiveSerializer( + (v) => typeof v === 'string' && stripAnsi(v) !== v, + (v) => stripAnsi(v) + ); +} diff --git a/src/legacy/core_plugins/apm_oss/index.d.ts b/packages/kbn-pm/index.d.ts similarity index 92% rename from src/legacy/core_plugins/apm_oss/index.d.ts rename to packages/kbn-pm/index.d.ts index 86fe4e0350dce..aa55df9215c2f 100644 --- a/src/legacy/core_plugins/apm_oss/index.d.ts +++ b/packages/kbn-pm/index.d.ts @@ -17,6 +17,4 @@ * under the License. */ -export interface ApmOssPlugin { - indexPatterns: string[]; -} +export * from './src/index'; diff --git a/packages/kbn-pm/tsconfig.json b/packages/kbn-pm/tsconfig.json index bfb13ee8dcf8a..c13a9243c50aa 100644 --- a/packages/kbn-pm/tsconfig.json +++ b/packages/kbn-pm/tsconfig.json @@ -1,6 +1,7 @@ { "extends": "../../tsconfig.json", "include": [ + "./index.d.ts", "./src/**/*.ts", "./dist/*.d.ts", ], diff --git a/packages/kbn-spec-to-console/README.md b/packages/kbn-spec-to-console/README.md index 526ceef43e3cd..0328dec791320 100644 --- a/packages/kbn-spec-to-console/README.md +++ b/packages/kbn-spec-to-console/README.md @@ -23,10 +23,10 @@ At the root of the Kibana repository, run the following commands: ```sh # OSS -yarn spec_to_console -g "/rest-api-spec/src/main/resources/rest-api-spec/api/*" -d "src/plugins/console/server/lib/spec_definitions/json" +yarn spec_to_console -g "/rest-api-spec/src/main/resources/rest-api-spec/api/*" -d "src/plugins/console/server/lib/spec_definitions/json/generated" # X-pack -yarn spec_to_console -g "/x-pack/plugin/src/test/resources/rest-api-spec/api/*" -d "x-pack/plugins/console_extensions/server/lib/spec_definitions/json" +yarn spec_to_console -g "/x-pack/plugin/src/test/resources/rest-api-spec/api/*" -d "x-pack/plugins/console_extensions/server/lib/spec_definitions/json/generated" ``` ### Information used in Console that is not available in the REST spec diff --git a/packages/kbn-test/src/failed_tests_reporter/__fixtures__/cypress_report.xml b/packages/kbn-test/src/failed_tests_reporter/__fixtures__/cypress_report.xml new file mode 100644 index 0000000000000..ed0e154552caa --- /dev/null +++ b/packages/kbn-test/src/failed_tests_reporter/__fixtures__/cypress_report.xml @@ -0,0 +1,50 @@ + + + + + + + + + ...` + +You can fix this problem by: + - Passing `{force: true}` which disables all error checking + - Passing `{waitForAnimations: false}` which disables waiting on animations + - Passing `{animationDistanceThreshold: 20}` which decreases the sensitivity + +https://on.cypress.io/element-is-animating + +Because this error occurred during a `after each` hook we are skipping the remaining tests in the current suite: `timeline flyout button` + at cypressErr (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:146621:16) + at cypressErrByPath (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:146630:10) + at Object.throwErrByPath (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:146593:11) + at Object.ensureElementIsNotAnimating (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:137560:24) + at ensureNotAnimating (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:127434:13) + at runAllChecks (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:127522:9) + at retryActionability 
(http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:127542:16) + at tryCatcher (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:9065:23) + at Function.Promise.attempt.Promise.try (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:6339:29) + at tryFn (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:140680:21) + at whenStable (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:140715:12) + at http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:140259:16 + at tryCatcher (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:9065:23) + at Promise._settlePromiseFromHandler (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:7000:31) + at Promise._settlePromise (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:7057:18) + at Promise._settlePromise0 (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:7102:10)]]> + + + diff --git a/packages/kbn-test/src/failed_tests_reporter/__fixtures__/index.ts b/packages/kbn-test/src/failed_tests_reporter/__fixtures__/index.ts index 02b6b5f064218..16ebe10ad5426 100644 --- a/packages/kbn-test/src/failed_tests_reporter/__fixtures__/index.ts +++ b/packages/kbn-test/src/failed_tests_reporter/__fixtures__/index.ts @@ -23,3 +23,4 @@ export const FTR_REPORT = Fs.readFileSync(require.resolve('./ftr_report.xml'), ' export const JEST_REPORT = Fs.readFileSync(require.resolve('./jest_report.xml'), 'utf8'); export const KARMA_REPORT = Fs.readFileSync(require.resolve('./karma_report.xml'), 'utf8'); export const MOCHA_REPORT = Fs.readFileSync(require.resolve('./mocha_report.xml'), 'utf8'); +export const CYPRESS_REPORT = Fs.readFileSync(require.resolve('./cypress_report.xml'), 'utf8'); diff --git a/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.test.ts b/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.test.ts index f8f279151e07f..53a74f6cc6af2 100644 --- a/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.test.ts +++ b/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.test.ts @@ -39,7 +39,13 @@ jest.mock('fs', () => { }; }); -import { FTR_REPORT, JEST_REPORT, MOCHA_REPORT, KARMA_REPORT } from './__fixtures__'; +import { + FTR_REPORT, + JEST_REPORT, + MOCHA_REPORT, + KARMA_REPORT, + CYPRESS_REPORT, +} from './__fixtures__'; import { parseTestReport } from './test_report'; import { addMessagesToReport } from './add_messages_to_report'; @@ -270,6 +276,69 @@ it('rewrites mocha reports with minimal changes', async () => { `); }); +it('rewrites cypress reports with minimal changes', async () => { + const xml = await addMessagesToReport({ + messages: [ + { + classname: '"after each" hook for "toggles open the timeline"', + name: 'timeline flyout button "after each" hook for "toggles open the timeline"', + message: 'Some extra content\n', + }, + ], + report: await parseTestReport(CYPRESS_REPORT), + log, + reportPath: Path.resolve(__dirname, './__fixtures__/cypress_report.xml'), + }); + + expect(createPatch('cypress.xml', CYPRESS_REPORT, xml, { context: 0 })).toMatchInlineSnapshot(` + Index: cypress.xml + =================================================================== + --- cypress.xml [object Object] + +++ cypress.xml + @@ -1,25 +1,16 @@ + -‹?xml version="1.0" encoding="UTF-8"?› + +‹?xml version="1.0" encoding="utf-8"?› + ‹testsuites name="Mocha Tests" time="16.198" 
tests="2" failures="1"› + - ‹testsuite name="Root Suite" timestamp="2020-07-22T15:06:26" tests="0" file="cypress/integration/timeline_flyout_button.spec.ts" failures="0" time="0"› + - ‹/testsuite› + + ‹testsuite name="Root Suite" timestamp="2020-07-22T15:06:26" tests="0" file="cypress/integration/timeline_flyout_button.spec.ts" failures="0" time="0"/› + ‹testsuite name="timeline flyout button" timestamp="2020-07-22T15:06:26" tests="2" failures="1" time="16.198"› + - ‹testcase name="timeline flyout button toggles open the timeline" time="8.099" classname="toggles open the timeline"› + - ‹/testcase› + + ‹testcase name="timeline flyout button toggles open the timeline" time="8.099" classname="toggles open the timeline"/› + ‹testcase name="timeline flyout button "after each" hook for "toggles open the timeline"" time="8.099" classname=""after each" hook for "toggles open the timeline""› + - ‹failure message="Timed out retrying: \`cy.click()\` could not be issued because this element is currently animating: + + ‹failure message="Timed out retrying: \`cy.click()\` could not be issued because this element is currently animating: \`<button class="euiButtonEmpty euiButtonEmpty--text" type="button" data-test-subj="timeline-new"›...</button›\` You can fix this problem by: - Passing \`{force: true}\` which disables all error checking - Passing \`{waitForAnimations: false}\` which disables waiting on animations - Passing \`{animationDistanceThreshold: 20}\` which decreases the sensitivity https://on.cypress.io/element-is-animating Because this error occurred during a \`after each\` hook we are skipping the remaining tests in the current suite: \`timeline flyout button\`" type="CypressError"›‹![CDATA[Failed Tests Reporter: + + - Some extra content + + -\`<button class="euiButtonEmpty euiButtonEmpty--text" type="button" data-test-subj="timeline-new">...</button>\` + + -You can fix this problem by: + - - Passing \`{force: true}\` which disables all error checking + - - Passing \`{waitForAnimations: false}\` which disables waiting on animations + - - Passing \`{animationDistanceThreshold: 20}\` which decreases the sensitivity + +CypressError: Timed out retrying: \`cy.click()\` could not be issued because this element is currently animating: + + -https://on.cypress.io/element-is-animating + - + -Because this error occurred during a \`after each\` hook we are skipping the remaining tests in the current suite: \`timeline flyout button\`" type="CypressError"›‹![CDATA[CypressError: Timed out retrying: \`cy.click()\` could not be issued because this element is currently animating: + - + \`‹button class="euiButtonEmpty euiButtonEmpty--text" type="button" data-test-subj="timeline-new"›...‹/button›\` + + You can fix this problem by: + - Passing \`{force: true}\` which disables all error checking + @@ -46,5 +37,5 @@ + at Promise._settlePromise (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:7057:18) + at Promise._settlePromise0 (http://elastic:changeme@localhost:61141/__cypress/runner/cypress_runner.js:7102:10)]]›‹/failure› + ‹/testcase› + ‹/testsuite› + -‹/testsuites› + +‹/testsuites› + \\ No newline at end of file + + `); +}); + it('rewrites karma reports with minimal changes', async () => { const xml = await addMessagesToReport({ report: await parseTestReport(KARMA_REPORT), diff --git a/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.ts b/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.ts index 6bc7556db8a47..27bf8a9c7549d 100644 --- 
a/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.ts +++ b/packages/kbn-test/src/failed_tests_reporter/add_messages_to_report.ts @@ -59,10 +59,14 @@ export async function addMessagesToReport(options: { log.info(`${classname} - ${name}:${messageList}`); const output = `Failed Tests Reporter:${messageList}\n\n`; - if (!testCase['system-out']) { - testCase['system-out'] = [output]; + if (typeof testCase.failure[0] === 'object' && testCase.failure[0].$.message) { + // failure with "messages" ignore the system-out on jenkins + // so we instead extend the failure message + testCase.failure[0]._ = output + testCase.failure[0]._; + } else if (!testCase['system-out']) { + testCase['system-out'] = [{ _: output }]; } else if (typeof testCase['system-out'][0] === 'string') { - testCase['system-out'][0] = output + String(testCase['system-out'][0]); + testCase['system-out'][0] = { _: output + testCase['system-out'][0] }; } else { testCase['system-out'][0]._ = output + testCase['system-out'][0]._; } diff --git a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts index 8a951ac969199..3dfb1ea44d9e7 100644 --- a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts +++ b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts @@ -72,6 +72,7 @@ export function runFailedTestsReporterCli() { } const patterns = flags._.length ? flags._ : DEFAULT_PATTERNS; + log.info('Searching for reports at', patterns); const reportPaths = await globby(patterns, { absolute: true, }); @@ -80,6 +81,7 @@ export function runFailedTestsReporterCli() { throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`); } + log.info('found', reportPaths.length, 'junit reports', reportPaths); const newlyCreatedIssues: Array<{ failure: TestFailure; newIssue: GithubIssueMini; diff --git a/packages/kbn-test/src/failed_tests_reporter/test_report.ts b/packages/kbn-test/src/failed_tests_reporter/test_report.ts index 43d84163462d3..9907ca8b89ca5 100644 --- a/packages/kbn-test/src/failed_tests_reporter/test_report.ts +++ b/packages/kbn-test/src/failed_tests_reporter/test_report.ts @@ -70,7 +70,7 @@ export interface TestCase { } export interface FailedTestCase extends TestCase { - failure: Array; + failure: Array; } /** diff --git a/src/core/server/elasticsearch/client/configure_client.test.ts b/src/core/server/elasticsearch/client/configure_client.test.ts index 32da142764a78..11e3199a79fd2 100644 --- a/src/core/server/elasticsearch/client/configure_client.test.ts +++ b/src/core/server/elasticsearch/client/configure_client.test.ts @@ -118,26 +118,40 @@ describe('configureClient', () => { }); describe('Client logging', () => { - it('logs error when the client emits an error', () => { + it('logs error when the client emits an @elastic/elasticsearch error', () => { + const client = configureClient(config, { logger, scoped: false }); + + const response = createApiResponse({ body: {} }); + client.emit('response', new errors.TimeoutError('message', response), response); + + expect(loggingSystemMock.collect(logger).error).toMatchInlineSnapshot(` + Array [ + Array [ + "[TimeoutError]: message", + ], + ] + `); + }); + + it('logs error when the client emits an ResponseError returned by elasticsearch', () => { const client = configureClient(config, { logger, scoped: false }); const response = createApiResponse({ + statusCode: 400, + headers: {}, body: { error: { - type: 
'error message', + type: 'illegal_argument_exception', + reason: 'request [/_path] contains unrecognized parameter: [name]', }, }, }); - client.emit('response', new errors.ResponseError(response), null); - client.emit('response', new Error('some error'), null); + client.emit('response', new errors.ResponseError(response), response); expect(loggingSystemMock.collect(logger).error).toMatchInlineSnapshot(` Array [ Array [ - "ResponseError: error message", - ], - Array [ - "Error: some error", + "[illegal_argument_exception]: request [/_path] contains unrecognized parameter: [name]", ], ] `); diff --git a/src/core/server/elasticsearch/client/configure_client.ts b/src/core/server/elasticsearch/client/configure_client.ts index 5377f8ca1b070..9746ecb538b75 100644 --- a/src/core/server/elasticsearch/client/configure_client.ts +++ b/src/core/server/elasticsearch/client/configure_client.ts @@ -21,6 +21,7 @@ import { stringify } from 'querystring'; import { Client } from '@elastic/elasticsearch'; import { Logger } from '../../logging'; import { parseClientOptions, ElasticsearchClientConfig } from './client_config'; +import { isResponseError } from './errors'; export const configureClient = ( config: ElasticsearchClientConfig, @@ -35,9 +36,15 @@ export const configureClient = ( }; const addLogging = (client: Client, logger: Logger, logQueries: boolean) => { - client.on('response', (err, event) => { - if (err) { - logger.error(`${err.name}: ${err.message}`); + client.on('response', (error, event) => { + if (error) { + const errorMessage = + // error details for response errors provided by elasticsearch + isResponseError(error) + ? `[${event.body.error.type}]: ${event.body.error.reason}` + : `[${error.name}]: ${error.message}`; + + logger.error(errorMessage); } if (event && logQueries) { const params = event.meta.request.params; diff --git a/src/core/server/elasticsearch/client/index.ts b/src/core/server/elasticsearch/client/index.ts index b8125de2ee498..af63dfa6c7f4e 100644 --- a/src/core/server/elasticsearch/client/index.ts +++ b/src/core/server/elasticsearch/client/index.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -export { ElasticsearchClient } from './types'; +export * from './types'; export { IScopedClusterClient, ScopedClusterClient } from './scoped_cluster_client'; export { ElasticsearchClientConfig } from './client_config'; export { IClusterClient, ICustomClusterClient, ClusterClient } from './cluster_client'; diff --git a/src/core/server/elasticsearch/client/mocks.ts b/src/core/server/elasticsearch/client/mocks.ts index ec2885dfdf922..c93294404b52f 100644 --- a/src/core/server/elasticsearch/client/mocks.ts +++ b/src/core/server/elasticsearch/client/mocks.ts @@ -45,7 +45,7 @@ const createInternalClientMock = (): DeeplyMockedKeys => { .forEach((key) => { const propType = typeof obj[key]; if (propType === 'function') { - obj[key] = jest.fn(); + obj[key] = jest.fn(() => createSuccessTransportRequestPromise({})); } else if (propType === 'object' && obj[key] != null) { mockify(obj[key]); } @@ -70,6 +70,7 @@ const createInternalClientMock = (): DeeplyMockedKeys => { return (mock as unknown) as DeeplyMockedKeys; }; +// TODO fix naming ElasticsearchClientMock export type ElasticSearchClientMock = DeeplyMockedKeys; const createClientMock = (): ElasticSearchClientMock => @@ -124,32 +125,41 @@ export type MockedTransportRequestPromise = TransportRequestPromise & { abort: jest.MockedFunction<() => undefined>; }; -const createMockedClientResponse = (body: T): MockedTransportRequestPromise> => { - const response: ApiResponse = { - body, - statusCode: 200, - warnings: [], - headers: {}, - meta: {} as any, - }; +const createSuccessTransportRequestPromise = ( + body: T, + { statusCode = 200 }: { statusCode?: number } = {} +): MockedTransportRequestPromise> => { + const response = createApiResponse({ body, statusCode }); const promise = Promise.resolve(response); (promise as MockedTransportRequestPromise>).abort = jest.fn(); return promise as MockedTransportRequestPromise>; }; -const createMockedClientError = (err: any): MockedTransportRequestPromise => { +const createErrorTransportRequestPromise = (err: any): MockedTransportRequestPromise => { const promise = Promise.reject(err); (promise as MockedTransportRequestPromise).abort = jest.fn(); return promise as MockedTransportRequestPromise; }; +function createApiResponse(opts: Partial = {}): ApiResponse { + return { + body: {}, + statusCode: 200, + headers: {}, + warnings: [], + meta: {} as any, + ...opts, + }; +} + export const elasticsearchClientMock = { createClusterClient: createClusterClientMock, createCustomClusterClient: createCustomClusterClientMock, createScopedClusterClient: createScopedClusterClientMock, createElasticSearchClient: createClientMock, createInternalClient: createInternalClientMock, - createClientResponse: createMockedClientResponse, - createClientError: createMockedClientError, + createSuccessTransportRequestPromise, + createErrorTransportRequestPromise, + createApiResponse, }; diff --git a/src/core/server/elasticsearch/client/retry_call_cluster.test.ts b/src/core/server/elasticsearch/client/retry_call_cluster.test.ts index a7177c0b29047..3aa47e8b40e24 100644 --- a/src/core/server/elasticsearch/client/retry_call_cluster.test.ts +++ b/src/core/server/elasticsearch/client/retry_call_cluster.test.ts @@ -23,7 +23,8 @@ import { loggingSystemMock } from '../../logging/logging_system.mock'; import { retryCallCluster, migrationRetryCallCluster } from './retry_call_cluster'; const dummyBody = { foo: 'bar' }; -const createErrorReturn = (err: any) => elasticsearchClientMock.createClientError(err); +const createErrorReturn = (err: any) => + 
elasticsearchClientMock.createErrorTransportRequestPromise(err); describe('retryCallCluster', () => { let client: ReturnType; @@ -33,7 +34,9 @@ describe('retryCallCluster', () => { }); it('returns response from ES API call in case of success', async () => { - const successReturn = elasticsearchClientMock.createClientResponse({ ...dummyBody }); + const successReturn = elasticsearchClientMock.createSuccessTransportRequestPromise({ + ...dummyBody, + }); client.asyncSearch.get.mockReturnValue(successReturn); @@ -42,7 +45,9 @@ describe('retryCallCluster', () => { }); it('retries ES API calls that rejects with `NoLivingConnectionsError`', async () => { - const successReturn = elasticsearchClientMock.createClientResponse({ ...dummyBody }); + const successReturn = elasticsearchClientMock.createSuccessTransportRequestPromise({ + ...dummyBody, + }); client.asyncSearch.get .mockImplementationOnce(() => @@ -57,7 +62,9 @@ describe('retryCallCluster', () => { it('rejects when ES API calls reject with other errors', async () => { client.ping .mockImplementationOnce(() => createErrorReturn(new Error('unknown error'))) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); await expect(retryCallCluster(() => client.ping())).rejects.toMatchInlineSnapshot( `[Error: unknown error]` @@ -73,7 +80,9 @@ describe('retryCallCluster', () => { createErrorReturn(new errors.NoLivingConnectionsError('no living connections', {} as any)) ) .mockImplementationOnce(() => createErrorReturn(new Error('unknown error'))) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); await expect(retryCallCluster(() => client.ping())).rejects.toMatchInlineSnapshot( `[Error: unknown error]` @@ -94,7 +103,9 @@ describe('migrationRetryCallCluster', () => { client.ping .mockImplementationOnce(() => createErrorReturn(error)) .mockImplementationOnce(() => createErrorReturn(error)) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); }; it('retries ES API calls that rejects with `NoLivingConnectionsError`', async () => { @@ -225,7 +236,9 @@ describe('migrationRetryCallCluster', () => { } as any) ) ) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); await migrationRetryCallCluster(() => client.ping(), logger, 1); @@ -258,7 +271,9 @@ describe('migrationRetryCallCluster', () => { } as any) ) ) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); await expect( migrationRetryCallCluster(() => client.ping(), logger, 1) @@ -274,7 +289,9 @@ describe('migrationRetryCallCluster', () => { createErrorReturn(new errors.TimeoutError('timeout error', {} as any)) ) .mockImplementationOnce(() => createErrorReturn(new Error('unknown error'))) - .mockImplementationOnce(() => elasticsearchClientMock.createClientResponse({ ...dummyBody })); + .mockImplementationOnce(() 
=> + elasticsearchClientMock.createSuccessTransportRequestPromise({ ...dummyBody }) + ); await expect( migrationRetryCallCluster(() => client.ping(), logger, 1) diff --git a/src/core/server/elasticsearch/client/retry_call_cluster.ts b/src/core/server/elasticsearch/client/retry_call_cluster.ts index 1ad039e512215..792f7f0a7fac9 100644 --- a/src/core/server/elasticsearch/client/retry_call_cluster.ts +++ b/src/core/server/elasticsearch/client/retry_call_cluster.ts @@ -27,7 +27,7 @@ const retryResponseStatuses = [ 403, // AuthenticationException 408, // RequestTimeout 410, // Gone -]; +] as const; /** * Retries the provided Elasticsearch API call when a `NoLivingConnectionsError` error is diff --git a/src/core/server/elasticsearch/client/types.ts b/src/core/server/elasticsearch/client/types.ts index 7ce998aab7669..285f52e89a591 100644 --- a/src/core/server/elasticsearch/client/types.ts +++ b/src/core/server/elasticsearch/client/types.ts @@ -41,3 +41,83 @@ export type ElasticsearchClient = Omit< ): TransportRequestPromise; }; }; + +interface ShardsResponse { + total: number; + successful: number; + failed: number; + skipped: number; +} + +interface Explanation { + value: number; + description: string; + details: Explanation[]; +} + +interface ShardsInfo { + total: number; + successful: number; + skipped: number; + failed: number; +} + +export interface CountResponse { + _shards: ShardsInfo; + count: number; +} + +/** + * Maintained until elasticsearch provides response typings out of the box + * https://github.com/elastic/elasticsearch-js/pull/970 + */ +export interface SearchResponse { + took: number; + timed_out: boolean; + _scroll_id?: string; + _shards: ShardsResponse; + hits: { + total: number; + max_score: number; + hits: Array<{ + _index: string; + _type: string; + _id: string; + _score: number; + _source: T; + _version?: number; + _explanation?: Explanation; + fields?: any; + highlight?: any; + inner_hits?: any; + matched_queries?: string[]; + sort?: string[]; + }>; + }; + aggregations?: any; +} + +export interface GetResponse { + _index: string; + _type: string; + _id: string; + _version: number; + _routing?: string; + found: boolean; + _source: T; + _seq_no: number; + _primary_term: number; +} + +export interface DeleteDocumentResponse { + _shards: ShardsResponse; + found: boolean; + _index: string; + _type: string; + _id: string; + _version: number; + result: string; + error?: { + type: string; + }; +} diff --git a/src/core/server/elasticsearch/elasticsearch_service.test.ts b/src/core/server/elasticsearch/elasticsearch_service.test.ts index 4375f09f1ce0b..49f5c8dd98790 100644 --- a/src/core/server/elasticsearch/elasticsearch_service.test.ts +++ b/src/core/server/elasticsearch/elasticsearch_service.test.ts @@ -227,7 +227,7 @@ describe('#setup', () => { it('esNodeVersionCompatibility$ only starts polling when subscribed to', async (done) => { const mockedClient = mockClusterClientInstance.asInternalUser; mockedClient.nodes.info.mockImplementation(() => - elasticsearchClientMock.createClientError(new Error()) + elasticsearchClientMock.createErrorTransportRequestPromise(new Error()) ); const setupContract = await elasticsearchService.setup(setupDeps); @@ -243,7 +243,7 @@ describe('#setup', () => { it('esNodeVersionCompatibility$ stops polling when unsubscribed from', async (done) => { const mockedClient = mockClusterClientInstance.asInternalUser; mockedClient.nodes.info.mockImplementation(() => - elasticsearchClientMock.createClientError(new Error()) + 
elasticsearchClientMock.createErrorTransportRequestPromise(new Error()) ); const setupContract = await elasticsearchService.setup(setupDeps); @@ -359,7 +359,7 @@ describe('#stop', () => { const mockedClient = mockClusterClientInstance.asInternalUser; mockedClient.nodes.info.mockImplementation(() => - elasticsearchClientMock.createClientError(new Error()) + elasticsearchClientMock.createErrorTransportRequestPromise(new Error()) ); const setupContract = await elasticsearchService.setup(setupDeps); diff --git a/src/core/server/elasticsearch/index.ts b/src/core/server/elasticsearch/index.ts index 8bb77b5dfdee0..32be6e6bf34dd 100644 --- a/src/core/server/elasticsearch/index.ts +++ b/src/core/server/elasticsearch/index.ts @@ -36,4 +36,8 @@ export { ElasticsearchClientConfig, ElasticsearchClient, IScopedClusterClient, + SearchResponse, + GetResponse, + DeleteDocumentResponse, + CountResponse, } from './client'; diff --git a/src/core/server/elasticsearch/legacy/index.ts b/src/core/server/elasticsearch/legacy/index.ts index 165980b9f4522..a1740faac7ddf 100644 --- a/src/core/server/elasticsearch/legacy/index.ts +++ b/src/core/server/elasticsearch/legacy/index.ts @@ -23,6 +23,5 @@ export { } from './cluster_client'; export { ILegacyScopedClusterClient, LegacyScopedClusterClient } from './scoped_cluster_client'; export { LegacyElasticsearchClientConfig } from './elasticsearch_client_config'; -export { retryCallCluster, migrationsRetryCallCluster } from './retry_call_cluster'; export { LegacyElasticsearchError, LegacyElasticsearchErrorHelpers } from './errors'; export * from './api_types'; diff --git a/src/core/server/elasticsearch/legacy/retry_call_cluster.test.ts b/src/core/server/elasticsearch/legacy/retry_call_cluster.test.ts deleted file mode 100644 index 62789a4fe952d..0000000000000 --- a/src/core/server/elasticsearch/legacy/retry_call_cluster.test.ts +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import * as legacyElasticsearch from 'elasticsearch'; - -import { retryCallCluster, migrationsRetryCallCluster } from './retry_call_cluster'; -import { loggingSystemMock } from '../../logging/logging_system.mock'; - -describe('retryCallCluster', () => { - it('retries ES API calls that rejects with NoConnections', () => { - expect.assertions(1); - const callEsApi = jest.fn(); - let i = 0; - const ErrorConstructor = legacyElasticsearch.errors.NoConnections; - callEsApi.mockImplementation(() => { - return i++ <= 2 ? 
Promise.reject(new ErrorConstructor()) : Promise.resolve('success'); - }); - const retried = retryCallCluster(callEsApi); - return expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`); - }); - - it('rejects when ES API calls reject with other errors', async () => { - expect.assertions(3); - const callEsApi = jest.fn(); - let i = 0; - callEsApi.mockImplementation(() => { - i++; - - return i === 1 - ? Promise.reject(new Error('unknown error')) - : i === 2 - ? Promise.resolve('success') - : i === 3 || i === 4 - ? Promise.reject(new legacyElasticsearch.errors.NoConnections()) - : i === 5 - ? Promise.reject(new Error('unknown error')) - : null; - }); - const retried = retryCallCluster(callEsApi); - await expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`); - await expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`); - return expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`); - }); -}); - -describe('migrationsRetryCallCluster', () => { - const errors = [ - 'NoConnections', - 'ConnectionFault', - 'ServiceUnavailable', - 'RequestTimeout', - 'AuthenticationException', - 'AuthorizationException', - 'Gone', - ]; - - const mockLogger = loggingSystemMock.create(); - - beforeEach(() => { - loggingSystemMock.clear(mockLogger); - }); - - errors.forEach((errorName) => { - it('retries ES API calls that rejects with ' + errorName, () => { - expect.assertions(1); - const callEsApi = jest.fn(); - let i = 0; - const ErrorConstructor = (legacyElasticsearch.errors as any)[errorName]; - callEsApi.mockImplementation(() => { - return i++ <= 2 ? Promise.reject(new ErrorConstructor()) : Promise.resolve('success'); - }); - const retried = migrationsRetryCallCluster(callEsApi, mockLogger.get('mock log'), 1); - return expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`); - }); - }); - - it('retries ES API calls that rejects with snapshot_in_progress_exception', () => { - expect.assertions(1); - const callEsApi = jest.fn(); - let i = 0; - callEsApi.mockImplementation(() => { - return i++ <= 2 - ? Promise.reject({ body: { error: { type: 'snapshot_in_progress_exception' } } }) - : Promise.resolve('success'); - }); - const retried = migrationsRetryCallCluster(callEsApi, mockLogger.get('mock log'), 1); - return expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`); - }); - - it('rejects when ES API calls reject with other errors', async () => { - expect.assertions(3); - const callEsApi = jest.fn(); - let i = 0; - callEsApi.mockImplementation(() => { - i++; - - return i === 1 - ? Promise.reject(new Error('unknown error')) - : i === 2 - ? Promise.resolve('success') - : i === 3 || i === 4 - ? Promise.reject(new legacyElasticsearch.errors.NoConnections()) - : i === 5 - ? 
Promise.reject(new Error('unknown error')) - : null; - }); - const retried = migrationsRetryCallCluster(callEsApi, mockLogger.get('mock log'), 1); - await expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`); - await expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`); - return expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`); - }); - - it('logs only once for each unique error message', async () => { - const callEsApi = jest.fn(); - callEsApi.mockRejectedValueOnce(new legacyElasticsearch.errors.NoConnections()); - callEsApi.mockRejectedValueOnce(new legacyElasticsearch.errors.NoConnections()); - callEsApi.mockRejectedValueOnce(new legacyElasticsearch.errors.AuthenticationException()); - callEsApi.mockResolvedValueOnce('done'); - const retried = migrationsRetryCallCluster(callEsApi, mockLogger.get('mock log'), 1); - await retried('endpoint'); - expect(loggingSystemMock.collect(mockLogger).warn).toMatchInlineSnapshot(` - Array [ - Array [ - "Unable to connect to Elasticsearch. Error: No Living connections", - ], - Array [ - "Unable to connect to Elasticsearch. Error: Authentication Exception", - ], - ] - `); - }); -}); diff --git a/src/core/server/elasticsearch/legacy/retry_call_cluster.ts b/src/core/server/elasticsearch/legacy/retry_call_cluster.ts deleted file mode 100644 index 1b05cb2bf13cd..0000000000000 --- a/src/core/server/elasticsearch/legacy/retry_call_cluster.ts +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { retryWhen, concatMap } from 'rxjs/operators'; -import { defer, throwError, iif, timer } from 'rxjs'; -import * as legacyElasticsearch from 'elasticsearch'; - -import { LegacyCallAPIOptions } from '.'; -import { LegacyAPICaller } from './api_types'; -import { Logger } from '../../logging'; - -const esErrors = legacyElasticsearch.errors; - -/** - * Retries the provided Elasticsearch API call when an error such as - * `AuthenticationException` `NoConnections`, `ConnectionFault`, - * `ServiceUnavailable` or `RequestTimeout` are encountered. The API call will - * be retried once a second, indefinitely, until a successful response or a - * different error is received. 
- * - * @param apiCaller - * @param log - * @param delay - */ -export function migrationsRetryCallCluster( - apiCaller: LegacyAPICaller, - log: Logger, - delay: number = 2500 -) { - const previousErrors: string[] = []; - return ( - endpoint: string, - clientParams: Record = {}, - options?: LegacyCallAPIOptions - ) => { - return defer(() => apiCaller(endpoint, clientParams, options)) - .pipe( - retryWhen((error$) => - error$.pipe( - concatMap((error) => { - if (!previousErrors.includes(error.message)) { - log.warn(`Unable to connect to Elasticsearch. Error: ${error.message}`); - previousErrors.push(error.message); - } - return iif( - () => { - return ( - error instanceof esErrors.NoConnections || - error instanceof esErrors.ConnectionFault || - error instanceof esErrors.ServiceUnavailable || - error instanceof esErrors.RequestTimeout || - error instanceof esErrors.AuthenticationException || - error instanceof esErrors.AuthorizationException || - // @ts-expect-error - error instanceof esErrors.Gone || - error?.body?.error?.type === 'snapshot_in_progress_exception' - ); - }, - timer(delay), - throwError(error) - ); - }) - ) - ) - ) - .toPromise(); - }; -} - -/** - * Retries the provided Elasticsearch API call when a `NoConnections` error is - * encountered. The API call will be retried once a second, indefinitely, until - * a successful response or a different error is received. - * - * @param apiCaller - */ -export function retryCallCluster(apiCaller: LegacyAPICaller) { - return ( - endpoint: string, - clientParams: Record = {}, - options?: LegacyCallAPIOptions - ) => { - return defer(() => apiCaller(endpoint, clientParams, options)) - .pipe( - retryWhen((errors) => - errors.pipe( - concatMap((error) => - iif( - () => error instanceof legacyElasticsearch.errors.NoConnections, - timer(1000), - throwError(error) - ) - ) - ) - ) - ) - .toPromise(); - }; -} diff --git a/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts b/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts index 21adac081acf7..f6313f68abff2 100644 --- a/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts +++ b/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts @@ -28,8 +28,8 @@ const mockLogger = mockLoggerFactory.get('mock logger'); const KIBANA_VERSION = '5.1.0'; -const createEsSuccess = elasticsearchClientMock.createClientResponse; -const createEsError = elasticsearchClientMock.createClientError; +const createEsSuccess = elasticsearchClientMock.createSuccessTransportRequestPromise; +const createEsError = elasticsearchClientMock.createErrorTransportRequestPromise; function createNodes(...versions: string[]): NodesInfo { const nodes = {} as any; diff --git a/src/core/server/http/integration_tests/core_services.test.ts b/src/core/server/http/integration_tests/core_services.test.ts index 6338326626d54..6a00db5a6cc4a 100644 --- a/src/core/server/http/integration_tests/core_services.test.ts +++ b/src/core/server/http/integration_tests/core_services.test.ts @@ -479,7 +479,7 @@ describe('http service', () => { let elasticsearch: InternalElasticsearchServiceStart; esClient.ping.mockImplementation(() => - elasticsearchClientMock.createClientError( + elasticsearchClientMock.createErrorTransportRequestPromise( new ResponseError({ statusCode: 401, body: { @@ -517,7 +517,7 @@ describe('http service', () => { let elasticsearch: InternalElasticsearchServiceStart; esClient.ping.mockImplementation(() => - elasticsearchClientMock.createClientError( + 
elasticsearchClientMock.createErrorTransportRequestPromise( new ResponseError({ statusCode: 401, body: { diff --git a/src/core/server/index.ts b/src/core/server/index.ts index 706ec88c6ebfd..c846e81573acb 100644 --- a/src/core/server/index.ts +++ b/src/core/server/index.ts @@ -109,6 +109,7 @@ export { LegacyAPICaller, FakeRequest, ScopeableRequest, + ElasticsearchClient, } from './elasticsearch'; export * from './elasticsearch/legacy/api_types'; export { diff --git a/src/core/server/saved_objects/migrations/core/__snapshots__/elastic_index.test.ts.snap b/src/core/server/saved_objects/migrations/core/__snapshots__/elastic_index.test.ts.snap index 76bcc6ee219d9..6bd567be204d0 100644 --- a/src/core/server/saved_objects/migrations/core/__snapshots__/elastic_index.test.ts.snap +++ b/src/core/server/saved_objects/migrations/core/__snapshots__/elastic_index.test.ts.snap @@ -2,7 +2,6 @@ exports[`ElasticIndex write writes documents in bulk to the index 1`] = ` Array [ - "bulk", Object { "body": Array [ Object { diff --git a/src/core/server/saved_objects/migrations/core/elastic_index.test.ts b/src/core/server/saved_objects/migrations/core/elastic_index.test.ts index 393cbb7fbb2ae..fb8fb4ef95081 100644 --- a/src/core/server/saved_objects/migrations/core/elastic_index.test.ts +++ b/src/core/server/saved_objects/migrations/core/elastic_index.test.ts @@ -18,47 +18,52 @@ */ import _ from 'lodash'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; import * as Index from './elastic_index'; describe('ElasticIndex', () => { + let client: ReturnType; + + beforeEach(() => { + client = elasticsearchClientMock.createElasticSearchClient(); + }); describe('fetchInfo', () => { test('it handles 404', async () => { - const callCluster = jest - .fn() - .mockImplementation(async (path: string, { ignore, index }: any) => { - expect(path).toEqual('indices.get'); - expect(ignore).toEqual([404]); - expect(index).toEqual('.kibana-test'); - return { status: 404 }; - }); + client.indices.get.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); - const info = await Index.fetchInfo(callCluster as any, '.kibana-test'); + const info = await Index.fetchInfo(client, '.kibana-test'); expect(info).toEqual({ aliases: {}, exists: false, indexName: '.kibana-test', mappings: { dynamic: 'strict', properties: {} }, }); + + expect(client.indices.get).toHaveBeenCalledWith({ index: '.kibana-test' }, { ignore: [404] }); }); test('fails if the index doc type is unsupported', async () => { - const callCluster = jest.fn(async (path: string, { index }: any) => { - return { + client.indices.get.mockImplementation((params) => { + const index = params!.index as string; + return elasticsearchClientMock.createSuccessTransportRequestPromise({ [index]: { aliases: { foo: index }, mappings: { spock: { dynamic: 'strict', properties: { a: 'b' } } }, }, - }; + }); }); - await expect(Index.fetchInfo(callCluster as any, '.baz')).rejects.toThrow( + await expect(Index.fetchInfo(client, '.baz')).rejects.toThrow( /cannot be automatically migrated/ ); }); test('fails if there are multiple root types', async () => { - const callCluster = jest.fn().mockImplementation(async (path: string, { index }: any) => { - return { + client.indices.get.mockImplementation((params) => { + const index = params!.index as string; + return elasticsearchClientMock.createSuccessTransportRequestPromise({ [index]: { aliases: { foo: index }, mappings: { @@ -66,25 +71,26 @@ describe('ElasticIndex', () => 
{ doctor: { dynamic: 'strict', properties: { a: 'b' } }, }, }, - }; + }); }); - await expect(Index.fetchInfo(callCluster, '.baz')).rejects.toThrow( + await expect(Index.fetchInfo(client, '.baz')).rejects.toThrow( /cannot be automatically migrated/ ); }); test('decorates index info with exists and indexName', async () => { - const callCluster = jest.fn().mockImplementation(async (path: string, { index }: any) => { - return { + client.indices.get.mockImplementation((params) => { + const index = params!.index as string; + return elasticsearchClientMock.createSuccessTransportRequestPromise({ [index]: { aliases: { foo: index }, mappings: { dynamic: 'strict', properties: { a: 'b' } }, }, - }; + }); }); - const info = await Index.fetchInfo(callCluster, '.baz'); + const info = await Index.fetchInfo(client, '.baz'); expect(info).toEqual({ aliases: { foo: '.baz' }, mappings: { dynamic: 'strict', properties: { a: 'b' } }, @@ -96,171 +102,120 @@ describe('ElasticIndex', () => { describe('createIndex', () => { test('calls indices.create', async () => { - const callCluster = jest.fn(async (path: string, { body, index }: any) => { - expect(path).toEqual('indices.create'); - expect(body).toEqual({ + await Index.createIndex(client, '.abcd', { foo: 'bar' } as any); + + expect(client.indices.create).toHaveBeenCalledTimes(1); + expect(client.indices.create).toHaveBeenCalledWith({ + body: { mappings: { foo: 'bar' }, - settings: { auto_expand_replicas: '0-1', number_of_shards: 1 }, - }); - expect(index).toEqual('.abcd'); + settings: { + auto_expand_replicas: '0-1', + number_of_shards: 1, + }, + }, + index: '.abcd', }); - - await Index.createIndex(callCluster as any, '.abcd', { foo: 'bar' } as any); - expect(callCluster).toHaveBeenCalled(); }); }); describe('deleteIndex', () => { test('calls indices.delete', async () => { - const callCluster = jest.fn(async (path: string, { index }: any) => { - expect(path).toEqual('indices.delete'); - expect(index).toEqual('.lotr'); - }); + await Index.deleteIndex(client, '.lotr'); - await Index.deleteIndex(callCluster as any, '.lotr'); - expect(callCluster).toHaveBeenCalled(); + expect(client.indices.delete).toHaveBeenCalledTimes(1); + expect(client.indices.delete).toHaveBeenCalledWith({ + index: '.lotr', + }); }); }); describe('claimAlias', () => { - function assertCalled(callCluster: jest.Mock) { - expect(callCluster.mock.calls.map(([path]) => path)).toEqual([ - 'indices.getAlias', - 'indices.updateAliases', - 'indices.refresh', - ]); - } - test('handles unaliased indices', async () => { - const callCluster = jest.fn(async (path: string, arg: any) => { - switch (path) { - case 'indices.getAlias': - expect(arg.ignore).toEqual([404]); - expect(arg.name).toEqual('.hola'); - return { status: 404 }; - case 'indices.updateAliases': - expect(arg.body).toEqual({ - actions: [{ add: { index: '.hola-42', alias: '.hola' } }], - }); - return true; - case 'indices.refresh': - expect(arg.index).toEqual('.hola-42'); - return true; - default: - throw new Error(`Dunnoes what ${path} means.`); - } - }); + client.indices.getAlias.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); - await Index.claimAlias(callCluster as any, '.hola-42', '.hola'); + await Index.claimAlias(client, '.hola-42', '.hola'); - assertCalled(callCluster); + expect(client.indices.getAlias).toHaveBeenCalledWith( + { + name: '.hola', + }, + { ignore: [404] } + ); + expect(client.indices.updateAliases).toHaveBeenCalledWith({ + body: { + actions: [{ add: { index: 
'.hola-42', alias: '.hola' } }], + }, + }); + expect(client.indices.refresh).toHaveBeenCalledWith({ + index: '.hola-42', + }); }); test('removes existing alias', async () => { - const callCluster = jest.fn(async (path: string, arg: any) => { - switch (path) { - case 'indices.getAlias': - return { '.my-fanci-index': '.muchacha' }; - case 'indices.updateAliases': - expect(arg.body).toEqual({ - actions: [ - { remove: { index: '.my-fanci-index', alias: '.muchacha' } }, - { add: { index: '.ze-index', alias: '.muchacha' } }, - ], - }); - return true; - case 'indices.refresh': - expect(arg.index).toEqual('.ze-index'); - return true; - default: - throw new Error(`Dunnoes what ${path} means.`); - } - }); + client.indices.getAlias.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + '.my-fanci-index': '.muchacha', + }) + ); - await Index.claimAlias(callCluster as any, '.ze-index', '.muchacha'); + await Index.claimAlias(client, '.ze-index', '.muchacha'); - assertCalled(callCluster); + expect(client.indices.getAlias).toHaveBeenCalledTimes(1); + expect(client.indices.updateAliases).toHaveBeenCalledWith({ + body: { + actions: [ + { remove: { index: '.my-fanci-index', alias: '.muchacha' } }, + { add: { index: '.ze-index', alias: '.muchacha' } }, + ], + }, + }); + expect(client.indices.refresh).toHaveBeenCalledWith({ + index: '.ze-index', + }); }); test('allows custom alias actions', async () => { - const callCluster = jest.fn(async (path: string, arg: any) => { - switch (path) { - case 'indices.getAlias': - return { '.my-fanci-index': '.muchacha' }; - case 'indices.updateAliases': - expect(arg.body).toEqual({ - actions: [ - { remove_index: { index: 'awww-snap!' } }, - { remove: { index: '.my-fanci-index', alias: '.muchacha' } }, - { add: { index: '.ze-index', alias: '.muchacha' } }, - ], - }); - return true; - case 'indices.refresh': - expect(arg.index).toEqual('.ze-index'); - return true; - default: - throw new Error(`Dunnoes what ${path} means.`); - } - }); + client.indices.getAlias.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + '.my-fanci-index': '.muchacha', + }) + ); - await Index.claimAlias(callCluster as any, '.ze-index', '.muchacha', [ + await Index.claimAlias(client, '.ze-index', '.muchacha', [ { remove_index: { index: 'awww-snap!' } }, ]); - assertCalled(callCluster); + expect(client.indices.getAlias).toHaveBeenCalledTimes(1); + expect(client.indices.updateAliases).toHaveBeenCalledWith({ + body: { + actions: [ + { remove_index: { index: 'awww-snap!' 
} }, + { remove: { index: '.my-fanci-index', alias: '.muchacha' } }, + { add: { index: '.ze-index', alias: '.muchacha' } }, + ], + }, + }); + expect(client.indices.refresh).toHaveBeenCalledWith({ + index: '.ze-index', + }); }); }); describe('convertToAlias', () => { test('it creates the destination index, then reindexes to it', async () => { - const callCluster = jest.fn(async (path: string, arg: any) => { - switch (path) { - case 'indices.create': - expect(arg.body).toEqual({ - mappings: { - dynamic: 'strict', - properties: { foo: { type: 'keyword' } }, - }, - settings: { auto_expand_replicas: '0-1', number_of_shards: 1 }, - }); - expect(arg.index).toEqual('.ze-index'); - return true; - case 'reindex': - expect(arg).toMatchObject({ - body: { - dest: { index: '.ze-index' }, - source: { index: '.muchacha' }, - script: { - source: `ctx._id = ctx._source.type + ':' + ctx._id`, - lang: 'painless', - }, - }, - refresh: true, - waitForCompletion: false, - }); - return { task: 'abc' }; - case 'tasks.get': - expect(arg.taskId).toEqual('abc'); - return { completed: true }; - case 'indices.getAlias': - return { '.my-fanci-index': '.muchacha' }; - case 'indices.updateAliases': - expect(arg.body).toEqual({ - actions: [ - { remove_index: { index: '.muchacha' } }, - { remove: { alias: '.muchacha', index: '.my-fanci-index' } }, - { add: { index: '.ze-index', alias: '.muchacha' } }, - ], - }); - return true; - case 'indices.refresh': - expect(arg.index).toEqual('.ze-index'); - return true; - default: - throw new Error(`Dunnoes what ${path} means.`); - } - }); + client.indices.getAlias.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + '.my-fanci-index': '.muchacha', + }) + ); + client.reindex.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ task: 'abc' }) + ); + client.tasks.get.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ completed: true }) + ); const info = { aliases: {}, @@ -271,61 +226,77 @@ describe('ElasticIndex', () => { properties: { foo: { type: 'keyword' } }, }, }; + await Index.convertToAlias( - callCluster as any, + client, info, '.muchacha', 10, `ctx._id = ctx._source.type + ':' + ctx._id` ); - expect(callCluster.mock.calls.map(([path]) => path)).toEqual([ - 'indices.create', - 'reindex', - 'tasks.get', - 'indices.getAlias', - 'indices.updateAliases', - 'indices.refresh', - ]); + expect(client.indices.create).toHaveBeenCalledWith({ + body: { + mappings: { + dynamic: 'strict', + properties: { foo: { type: 'keyword' } }, + }, + settings: { auto_expand_replicas: '0-1', number_of_shards: 1 }, + }, + index: '.ze-index', + }); + + expect(client.reindex).toHaveBeenCalledWith({ + body: { + dest: { index: '.ze-index' }, + source: { index: '.muchacha', size: 10 }, + script: { + source: `ctx._id = ctx._source.type + ':' + ctx._id`, + lang: 'painless', + }, + }, + refresh: true, + wait_for_completion: false, + }); + + expect(client.tasks.get).toHaveBeenCalledWith({ + task_id: 'abc', + }); + + expect(client.indices.updateAliases).toHaveBeenCalledWith({ + body: { + actions: [ + { remove_index: { index: '.muchacha' } }, + { remove: { alias: '.muchacha', index: '.my-fanci-index' } }, + { add: { index: '.ze-index', alias: '.muchacha' } }, + ], + }, + }); + + expect(client.indices.refresh).toHaveBeenCalledWith({ + index: '.ze-index', + }); }); test('throws error if re-index task fails', async () => { - const callCluster = jest.fn(async (path: string, arg: any) => { - switch (path) { - case 
'indices.create': - expect(arg.body).toEqual({ - mappings: { - dynamic: 'strict', - properties: { foo: { type: 'keyword' } }, - }, - settings: { auto_expand_replicas: '0-1', number_of_shards: 1 }, - }); - expect(arg.index).toEqual('.ze-index'); - return true; - case 'reindex': - expect(arg).toMatchObject({ - body: { - dest: { index: '.ze-index' }, - source: { index: '.muchacha' }, - }, - refresh: true, - waitForCompletion: false, - }); - return { task: 'abc' }; - case 'tasks.get': - expect(arg.taskId).toEqual('abc'); - return { - completed: true, - error: { - type: 'search_phase_execution_exception', - reason: 'all shards failed', - failed_shards: [], - }, - }; - default: - throw new Error(`Dunnoes what ${path} means.`); - } - }); + client.indices.getAlias.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + '.my-fanci-index': '.muchacha', + }) + ); + client.reindex.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ task: 'abc' }) + ); + client.tasks.get.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + completed: true, + error: { + type: 'search_phase_execution_exception', + reason: 'all shards failed', + failed_shards: [], + }, + }) + ); const info = { aliases: {}, @@ -336,22 +307,44 @@ describe('ElasticIndex', () => { properties: { foo: { type: 'keyword' } }, }, }; - await expect(Index.convertToAlias(callCluster as any, info, '.muchacha', 10)).rejects.toThrow( + + await expect(Index.convertToAlias(client, info, '.muchacha', 10)).rejects.toThrow( /Re-index failed \[search_phase_execution_exception\] all shards failed/ ); - expect(callCluster.mock.calls.map(([path]) => path)).toEqual([ - 'indices.create', - 'reindex', - 'tasks.get', - ]); + expect(client.indices.create).toHaveBeenCalledWith({ + body: { + mappings: { + dynamic: 'strict', + properties: { foo: { type: 'keyword' } }, + }, + settings: { auto_expand_replicas: '0-1', number_of_shards: 1 }, + }, + index: '.ze-index', + }); + + expect(client.reindex).toHaveBeenCalledWith({ + body: { + dest: { index: '.ze-index' }, + source: { index: '.muchacha', size: 10 }, + }, + refresh: true, + wait_for_completion: false, + }); + + expect(client.tasks.get).toHaveBeenCalledWith({ + task_id: 'abc', + }); }); }); describe('write', () => { test('writes documents in bulk to the index', async () => { + client.bulk.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ items: [] }) + ); + const index = '.myalias'; - const callCluster = jest.fn().mockResolvedValue({ items: [] }); const docs = [ { _id: 'niceguy:fredrogers', @@ -375,19 +368,20 @@ describe('ElasticIndex', () => { }, ]; - await Index.write(callCluster, index, docs); + await Index.write(client, index, docs); - expect(callCluster).toHaveBeenCalled(); - expect(callCluster.mock.calls[0]).toMatchSnapshot(); + expect(client.bulk).toHaveBeenCalled(); + expect(client.bulk.mock.calls[0]).toMatchSnapshot(); }); test('fails if any document fails', async () => { - const index = '.myalias'; - const callCluster = jest.fn(() => - Promise.resolve({ + client.bulk.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ items: [{ index: { error: { type: 'shazm', reason: 'dern' } } }], }) ); + + const index = '.myalias'; const docs = [ { _id: 'niceguy:fredrogers', @@ -400,23 +394,20 @@ describe('ElasticIndex', () => { }, ]; - await expect(Index.write(callCluster as any, index, docs)).rejects.toThrow(/dern/); - expect(callCluster).toHaveBeenCalled(); + 
await expect(Index.write(client as any, index, docs)).rejects.toThrow(/dern/); + expect(client.bulk).toHaveBeenCalledTimes(1); }); }); describe('reader', () => { test('returns docs in batches', async () => { const index = '.myalias'; - const callCluster = jest.fn(); - const batch1 = [ { _id: 'such:1', _source: { type: 'such', such: { num: 1 } }, }, ]; - const batch2 = [ { _id: 'aaa:2', @@ -432,42 +423,56 @@ describe('ElasticIndex', () => { }, ]; - callCluster - .mockResolvedValueOnce({ + client.search = jest.fn().mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ _scroll_id: 'x', _shards: { success: 1, total: 1 }, hits: { hits: _.cloneDeep(batch1) }, }) - .mockResolvedValueOnce({ - _scroll_id: 'y', - _shards: { success: 1, total: 1 }, - hits: { hits: _.cloneDeep(batch2) }, - }) - .mockResolvedValueOnce({ - _scroll_id: 'z', - _shards: { success: 1, total: 1 }, - hits: { hits: [] }, - }) - .mockResolvedValue({}); - - const read = Index.reader(callCluster, index, { batchSize: 100, scrollDuration: '5m' }); + ); + client.scroll = jest + .fn() + .mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _scroll_id: 'y', + _shards: { success: 1, total: 1 }, + hits: { hits: _.cloneDeep(batch2) }, + }) + ) + .mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _scroll_id: 'z', + _shards: { success: 1, total: 1 }, + hits: { hits: [] }, + }) + ); + + const read = Index.reader(client, index, { batchSize: 100, scrollDuration: '5m' }); expect(await read()).toEqual(batch1); expect(await read()).toEqual(batch2); expect(await read()).toEqual([]); - // Check order of calls, as well as args - expect(callCluster.mock.calls).toEqual([ - ['search', { body: { size: 100 }, index, scroll: '5m' }], - ['scroll', { scroll: '5m', scrollId: 'x' }], - ['scroll', { scroll: '5m', scrollId: 'y' }], - ['clearScroll', { scrollId: 'z' }], - ]); + expect(client.search).toHaveBeenCalledWith({ + body: { size: 100 }, + index, + scroll: '5m', + }); + expect(client.scroll).toHaveBeenCalledWith({ + scroll: '5m', + scroll_id: 'x', + }); + expect(client.scroll).toHaveBeenCalledWith({ + scroll: '5m', + scroll_id: 'y', + }); + expect(client.clearScroll).toHaveBeenCalledWith({ + scroll_id: 'z', + }); }); test('returns all root-level properties', async () => { const index = '.myalias'; - const callCluster = jest.fn(); const batch = [ { _id: 'such:1', @@ -480,19 +485,22 @@ describe('ElasticIndex', () => { }, ]; - callCluster - .mockResolvedValueOnce({ + client.search = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ _scroll_id: 'x', _shards: { success: 1, total: 1 }, hits: { hits: _.cloneDeep(batch) }, }) - .mockResolvedValue({ + ); + client.scroll = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ _scroll_id: 'z', _shards: { success: 1, total: 1 }, hits: { hits: [] }, - }); + }) + ); - const read = Index.reader(callCluster, index, { + const read = Index.reader(client, index, { batchSize: 100, scrollDuration: '5m', }); @@ -502,11 +510,14 @@ describe('ElasticIndex', () => { test('fails if not all shards were successful', async () => { const index = '.myalias'; - const callCluster = jest.fn(); - callCluster.mockResolvedValue({ _shards: { successful: 1, total: 2 } }); + client.search = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _shards: { successful: 1, total: 2 }, + }) + ); - const read = 
Index.reader(callCluster, index, { + const read = Index.reader(client, index, { batchSize: 100, scrollDuration: '5m', }); @@ -516,7 +527,6 @@ describe('ElasticIndex', () => { test('handles shards not being returned', async () => { const index = '.myalias'; - const callCluster = jest.fn(); const batch = [ { _id: 'such:1', @@ -529,11 +539,20 @@ describe('ElasticIndex', () => { }, ]; - callCluster - .mockResolvedValueOnce({ _scroll_id: 'x', hits: { hits: _.cloneDeep(batch) } }) - .mockResolvedValue({ _scroll_id: 'z', hits: { hits: [] } }); + client.search = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _scroll_id: 'x', + hits: { hits: _.cloneDeep(batch) }, + }) + ); + client.scroll = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _scroll_id: 'z', + hits: { hits: [] }, + }) + ); - const read = Index.reader(callCluster, index, { + const read = Index.reader(client, index, { batchSize: 100, scrollDuration: '5m', }); @@ -550,23 +569,24 @@ describe('ElasticIndex', () => { count, migrations, }: any) { - const callCluster = jest.fn(async (path: string) => { - if (path === 'indices.get') { - return { - [index]: { mappings }, - }; - } - if (path === 'count') { - return { count, _shards: { success: 1, total: 1 } }; - } - throw new Error(`Unknown command ${path}.`); - }); - const hasMigrations = await Index.migrationsUpToDate(callCluster as any, index, migrations); - return { hasMigrations, callCluster }; + client.indices.get = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + [index]: { mappings }, + }) + ); + client.count = jest.fn().mockReturnValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + count, + _shards: { success: 1, total: 1 }, + }) + ); + + const hasMigrations = await Index.migrationsUpToDate(client, index, migrations); + return { hasMigrations }; } test('is false if the index mappings do not contain migrationVersion', async () => { - const { hasMigrations, callCluster } = await testMigrationsUpToDate({ + const { hasMigrations } = await testMigrationsUpToDate({ index: '.myalias', mappings: { properties: { @@ -578,17 +598,18 @@ describe('ElasticIndex', () => { }); expect(hasMigrations).toBeFalsy(); - expect(callCluster.mock.calls[0]).toEqual([ - 'indices.get', + expect(client.indices.get).toHaveBeenCalledWith( { - ignore: [404], index: '.myalias', }, - ]); + { + ignore: [404], + } + ); }); test('is true if there are no migrations defined', async () => { - const { hasMigrations, callCluster } = await testMigrationsUpToDate({ + const { hasMigrations } = await testMigrationsUpToDate({ index: '.myalias', mappings: { properties: { @@ -604,12 +625,11 @@ describe('ElasticIndex', () => { }); expect(hasMigrations).toBeTruthy(); - expect(callCluster).toHaveBeenCalled(); - expect(callCluster.mock.calls[0][0]).toEqual('indices.get'); + expect(client.indices.get).toHaveBeenCalledTimes(1); }); test('is true if there are no documents out of date', async () => { - const { hasMigrations, callCluster } = await testMigrationsUpToDate({ + const { hasMigrations } = await testMigrationsUpToDate({ index: '.myalias', mappings: { properties: { @@ -625,13 +645,12 @@ describe('ElasticIndex', () => { }); expect(hasMigrations).toBeTruthy(); - expect(callCluster).toHaveBeenCalled(); - expect(callCluster.mock.calls[0][0]).toEqual('indices.get'); - expect(callCluster.mock.calls[1][0]).toEqual('count'); + 
expect(client.indices.get).toHaveBeenCalledTimes(1); + expect(client.count).toHaveBeenCalledTimes(1); }); test('is false if there are documents out of date', async () => { - const { hasMigrations, callCluster } = await testMigrationsUpToDate({ + const { hasMigrations } = await testMigrationsUpToDate({ index: '.myalias', mappings: { properties: { @@ -647,12 +666,12 @@ describe('ElasticIndex', () => { }); expect(hasMigrations).toBeFalsy(); - expect(callCluster.mock.calls[0][0]).toEqual('indices.get'); - expect(callCluster.mock.calls[1][0]).toEqual('count'); + expect(client.indices.get).toHaveBeenCalledTimes(1); + expect(client.count).toHaveBeenCalledTimes(1); }); test('counts docs that are out of date', async () => { - const { callCluster } = await testMigrationsUpToDate({ + await testMigrationsUpToDate({ index: '.myalias', mappings: { properties: { @@ -686,23 +705,20 @@ describe('ElasticIndex', () => { }; } - expect(callCluster.mock.calls[1]).toEqual([ - 'count', - { - body: { - query: { - bool: { - should: [ - shouldClause('dashy', '23.2.5'), - shouldClause('bashy', '99.9.3'), - shouldClause('flashy', '3.4.5'), - ], - }, + expect(client.count).toHaveBeenCalledWith({ + body: { + query: { + bool: { + should: [ + shouldClause('dashy', '23.2.5'), + shouldClause('bashy', '99.9.3'), + shouldClause('flashy', '3.4.5'), + ], }, }, - index: '.myalias', }, - ]); + index: '.myalias', + }); }); }); }); diff --git a/src/core/server/saved_objects/migrations/core/elastic_index.ts b/src/core/server/saved_objects/migrations/core/elastic_index.ts index e87c3e3ff0d64..d5093bfd8dc42 100644 --- a/src/core/server/saved_objects/migrations/core/elastic_index.ts +++ b/src/core/server/saved_objects/migrations/core/elastic_index.ts @@ -23,9 +23,12 @@ */ import _ from 'lodash'; +import { MigrationEsClient } from './migration_es_client'; +import { CountResponse, SearchResponse } from '../../../elasticsearch'; import { IndexMapping } from '../../mappings'; import { SavedObjectsMigrationVersion } from '../../types'; -import { AliasAction, CallCluster, NotFound, RawDoc, ShardsInfo } from './call_cluster'; +import { AliasAction, RawDoc, ShardsInfo } from './call_cluster'; +import { SavedObjectsRawDocSource } from '../../serialization'; const settings = { number_of_shards: 1, auto_expand_replicas: '0-1' }; @@ -40,13 +43,10 @@ export interface FullIndexInfo { * A slight enhancement to indices.get, that adds indexName, and validates that the * index mappings are somewhat what we expect. 
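 *
 * Illustrative usage only (not part of this change); the returned shape follows the
 * `FullIndexInfo` interface above, with made-up values:
 *
 *   const info = await fetchInfo(client, '.kibana');
 *   // => { exists: true, indexName: '.kibana_1', aliases: { '.kibana': {} }, mappings: { ... } }
 *   // a missing index resolves to { exists: false, aliases: {}, ... } rather than throwing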
*/ -export async function fetchInfo(callCluster: CallCluster, index: string): Promise { - const result = await callCluster('indices.get', { - ignore: [404], - index, - }); +export async function fetchInfo(client: MigrationEsClient, index: string): Promise { + const { body, statusCode } = await client.indices.get({ index }, { ignore: [404] }); - if ((result as NotFound).status === 404) { + if (statusCode === 404) { return { aliases: {}, exists: false, @@ -55,7 +55,7 @@ export async function fetchInfo(callCluster: CallCluster, index: string): Promis }; } - const [indexName, indexInfo] = Object.entries(result)[0]; + const [indexName, indexInfo] = Object.entries(body)[0]; return assertIsSupportedIndex({ ...indexInfo, exists: true, indexName }); } @@ -71,7 +71,7 @@ export async function fetchInfo(callCluster: CallCluster, index: string): Promis * @prop {string} scrollDuration - The scroll duration used for scrolling through the index */ export function reader( - callCluster: CallCluster, + client: MigrationEsClient, index: string, { batchSize = 10, scrollDuration = '15m' }: { batchSize: number; scrollDuration: string } ) { @@ -80,19 +80,24 @@ export function reader( const nextBatch = () => scrollId !== undefined - ? callCluster('scroll', { scroll, scrollId }) - : callCluster('search', { body: { size: batchSize }, index, scroll }); - - const close = async () => scrollId && (await callCluster('clearScroll', { scrollId })); + ? client.scroll>({ + scroll, + scroll_id: scrollId, + }) + : client.search>({ + body: { size: batchSize }, + index, + scroll, + }); + + const close = async () => scrollId && (await client.clearScroll({ scroll_id: scrollId })); return async function read() { const result = await nextBatch(); - assertResponseIncludeAllShards(result); - - const docs = result.hits.hits; - - scrollId = result._scroll_id; + assertResponseIncludeAllShards(result.body); + scrollId = result.body._scroll_id; + const docs = result.body.hits.hits; if (!docs.length) { await close(); } @@ -109,8 +114,8 @@ export function reader( * @param {string} index * @param {RawDoc[]} docs */ -export async function write(callCluster: CallCluster, index: string, docs: RawDoc[]) { - const result = await callCluster('bulk', { +export async function write(client: MigrationEsClient, index: string, docs: RawDoc[]) { + const { body } = await client.bulk({ body: docs.reduce((acc: object[], doc: RawDoc) => { acc.push({ index: { @@ -125,7 +130,7 @@ export async function write(callCluster: CallCluster, index: string, docs: RawDo }, []), }); - const err = _.find(result.items, 'index.error.reason'); + const err = _.find(body.items, 'index.error.reason'); if (!err) { return; @@ -150,15 +155,15 @@ export async function write(callCluster: CallCluster, index: string, docs: RawDo * @param {SavedObjectsMigrationVersion} migrationVersion - The latest versions of the migrations */ export async function migrationsUpToDate( - callCluster: CallCluster, + client: MigrationEsClient, index: string, migrationVersion: SavedObjectsMigrationVersion, retryCount: number = 10 ): Promise { try { - const indexInfo = await fetchInfo(callCluster, index); + const indexInfo = await fetchInfo(client, index); - if (!_.get(indexInfo, 'mappings.properties.migrationVersion')) { + if (!indexInfo.mappings.properties?.migrationVersion) { return false; } @@ -167,7 +172,7 @@ export async function migrationsUpToDate( return true; } - const response = await callCluster('count', { + const { body } = await client.count({ body: { query: { bool: { @@ -175,7 +180,11 @@ 
export async function migrationsUpToDate( bool: { must: [ { exists: { field: type } }, - { bool: { must_not: { term: { [`migrationVersion.${type}`]: latestVersion } } } }, + { + bool: { + must_not: { term: { [`migrationVersion.${type}`]: latestVersion } }, + }, + }, ], }, })), @@ -185,9 +194,9 @@ export async function migrationsUpToDate( index, }); - assertResponseIncludeAllShards(response); + assertResponseIncludeAllShards(body); - return response.count === 0; + return body.count === 0; } catch (e) { // retry for Service Unavailable if (e.status !== 503 || retryCount === 0) { @@ -196,23 +205,23 @@ export async function migrationsUpToDate( await new Promise((r) => setTimeout(r, 1000)); - return await migrationsUpToDate(callCluster, index, migrationVersion, retryCount - 1); + return await migrationsUpToDate(client, index, migrationVersion, retryCount - 1); } } export async function createIndex( - callCluster: CallCluster, + client: MigrationEsClient, index: string, mappings?: IndexMapping ) { - await callCluster('indices.create', { + await client.indices.create({ body: { mappings, settings }, index, }); } -export async function deleteIndex(callCluster: CallCluster, index: string) { - await callCluster('indices.delete', { index }); +export async function deleteIndex(client: MigrationEsClient, index: string) { + await client.indices.delete({ index }); } /** @@ -225,20 +234,20 @@ export async function deleteIndex(callCluster: CallCluster, index: string) { * @param {string} alias - The name of the index being converted to an alias */ export async function convertToAlias( - callCluster: CallCluster, + client: MigrationEsClient, info: FullIndexInfo, alias: string, batchSize: number, script?: string ) { - await callCluster('indices.create', { + await client.indices.create({ body: { mappings: info.mappings, settings }, index: info.indexName, }); - await reindex(callCluster, alias, info.indexName, batchSize, script); + await reindex(client, alias, info.indexName, batchSize, script); - await claimAlias(callCluster, info.indexName, alias, [{ remove_index: { index: alias } }]); + await claimAlias(client, info.indexName, alias, [{ remove_index: { index: alias } }]); } /** @@ -252,22 +261,22 @@ export async function convertToAlias( * @param {AliasAction[]} aliasActions - Optional actions to be added to the updateAliases call */ export async function claimAlias( - callCluster: CallCluster, + client: MigrationEsClient, index: string, alias: string, aliasActions: AliasAction[] = [] ) { - const result = await callCluster('indices.getAlias', { ignore: [404], name: alias }); - const aliasInfo = (result as NotFound).status === 404 ? {} : result; + const { body, statusCode } = await client.indices.getAlias({ name: alias }, { ignore: [404] }); + const aliasInfo = statusCode === 404 ? {} : body; const removeActions = Object.keys(aliasInfo).map((key) => ({ remove: { index: key, alias } })); - await callCluster('indices.updateAliases', { + await client.indices.updateAliases({ body: { actions: aliasActions.concat(removeActions).concat({ add: { index, alias } }), }, }); - await callCluster('indices.refresh', { index }); + await client.indices.refresh({ index }); } /** @@ -318,7 +327,7 @@ function assertResponseIncludeAllShards({ _shards }: { _shards: ShardsInfo }) { * Reindexes from source to dest, polling for the reindex completion. 
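 *
 * For orientation, the flow implemented below is roughly (sketch only; the code that
 * follows is the source of truth):
 *
 *   const { body } = await client.reindex({
 *     body: { source: { index: source, size: batchSize }, dest: { index: dest } },
 *     refresh: true,
 *     wait_for_completion: false,
 *   });
 *   // then poll client.tasks.get({ task_id: body.task }) until `completed` is true,
 *   // rethrowing any task-level error as a "Re-index failed" error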
*/ async function reindex( - callCluster: CallCluster, + client: MigrationEsClient, source: string, dest: string, batchSize: number, @@ -329,7 +338,7 @@ async function reindex( // polling interval, as the request is fairly efficent, and we don't // want to block index migrations for too long on this. const pollInterval = 250; - const { task } = await callCluster('reindex', { + const { body: reindexBody } = await client.reindex({ body: { dest: { index: dest }, source: { index: source, size: batchSize }, @@ -341,23 +350,25 @@ async function reindex( : undefined, }, refresh: true, - waitForCompletion: false, + wait_for_completion: false, }); + const task = reindexBody.task; + let completed = false; while (!completed) { await new Promise((r) => setTimeout(r, pollInterval)); - completed = await callCluster('tasks.get', { - taskId: task, - }).then((result) => { - if (result.error) { - const e = result.error; - throw new Error(`Re-index failed [${e.type}] ${e.reason} :: ${JSON.stringify(e)}`); - } - - return result.completed; + const { body } = await client.tasks.get({ + task_id: task, }); + + if (body.error) { + const e = body.error; + throw new Error(`Re-index failed [${e.type}] ${e.reason} :: ${JSON.stringify(e)}`); + } + + completed = body.completed; } } diff --git a/src/core/server/saved_objects/migrations/core/index.ts b/src/core/server/saved_objects/migrations/core/index.ts index f7274740ea5fe..c9d3d2a71c9ad 100644 --- a/src/core/server/saved_objects/migrations/core/index.ts +++ b/src/core/server/saved_objects/migrations/core/index.ts @@ -23,3 +23,4 @@ export { buildActiveMappings } from './build_active_mappings'; export { CallCluster } from './call_cluster'; export { LogFn, SavedObjectsMigrationLogger } from './migration_logger'; export { MigrationResult, MigrationStatus } from './migration_coordinator'; +export { createMigrationEsClient, MigrationEsClient } from './migration_es_client'; diff --git a/src/core/server/saved_objects/migrations/core/index_migrator.test.ts b/src/core/server/saved_objects/migrations/core/index_migrator.test.ts index f8b203bf66d6a..78601d033f8d8 100644 --- a/src/core/server/saved_objects/migrations/core/index_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/core/index_migrator.test.ts @@ -18,18 +18,22 @@ */ import _ from 'lodash'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; import { SavedObjectUnsanitizedDoc, SavedObjectsSerializer } from '../../serialization'; import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry'; import { IndexMigrator } from './index_migrator'; +import { MigrationOpts } from './migration_context'; import { loggingSystemMock } from '../../../logging/logging_system.mock'; describe('IndexMigrator', () => { - let testOpts: any; + let testOpts: jest.Mocked & { + client: ReturnType; + }; beforeEach(() => { testOpts = { batchSize: 10, - callCluster: jest.fn(), + client: elasticsearchClientMock.createElasticSearchClient(), index: '.kibana', log: loggingSystemMock.create().get(), mappingProperties: {}, @@ -44,15 +48,15 @@ describe('IndexMigrator', () => { }); test('creates the index if it does not exist', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - testOpts.mappingProperties = { foo: { type: 'long' } }; + testOpts.mappingProperties = { foo: { type: 'long' } as any }; - withIndex(callCluster, { index: { status: 404 }, alias: { status: 404 } }); + withIndex(client, { index: { statusCode: 404 }, alias: { statusCode: 404 } }); await new 
IndexMigrator(testOpts).migrate(); - expect(callCluster).toHaveBeenCalledWith('indices.create', { + expect(client.indices.create).toHaveBeenCalledWith({ body: { mappings: { dynamic: 'strict', @@ -91,9 +95,9 @@ describe('IndexMigrator', () => { }); test('returns stats about the migration', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - withIndex(callCluster, { index: { status: 404 }, alias: { status: 404 } }); + withIndex(client, { index: { statusCode: 404 }, alias: { statusCode: 404 } }); const result = await new IndexMigrator(testOpts).migrate(); @@ -105,9 +109,9 @@ describe('IndexMigrator', () => { }); test('fails if there are multiple root doc types', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - withIndex(callCluster, { + withIndex(client, { index: { '.kibana_1': { aliases: {}, @@ -129,9 +133,9 @@ describe('IndexMigrator', () => { }); test('fails if root doc type is not "doc"', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - withIndex(callCluster, { + withIndex(client, { index: { '.kibana_1': { aliases: {}, @@ -152,11 +156,11 @@ describe('IndexMigrator', () => { }); test('retains unknown core field mappings from the previous index', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - testOpts.mappingProperties = { foo: { type: 'text' } }; + testOpts.mappingProperties = { foo: { type: 'text' } as any }; - withIndex(callCluster, { + withIndex(client, { index: { '.kibana_1': { aliases: {}, @@ -171,7 +175,7 @@ describe('IndexMigrator', () => { await new IndexMigrator(testOpts).migrate(); - expect(callCluster).toHaveBeenCalledWith('indices.create', { + expect(client.indices.create).toHaveBeenCalledWith({ body: { mappings: { dynamic: 'strict', @@ -211,11 +215,11 @@ describe('IndexMigrator', () => { }); test('disables complex field mappings from unknown types in the previous index', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - testOpts.mappingProperties = { foo: { type: 'text' } }; + testOpts.mappingProperties = { foo: { type: 'text' } as any }; - withIndex(callCluster, { + withIndex(client, { index: { '.kibana_1': { aliases: {}, @@ -230,7 +234,7 @@ describe('IndexMigrator', () => { await new IndexMigrator(testOpts).migrate(); - expect(callCluster).toHaveBeenCalledWith('indices.create', { + expect(client.indices.create).toHaveBeenCalledWith({ body: { mappings: { dynamic: 'strict', @@ -270,31 +274,31 @@ describe('IndexMigrator', () => { }); test('points the alias at the dest index', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; - withIndex(callCluster, { index: { status: 404 }, alias: { status: 404 } }); + withIndex(client, { index: { statusCode: 404 }, alias: { statusCode: 404 } }); await new IndexMigrator(testOpts).migrate(); - expect(callCluster).toHaveBeenCalledWith('indices.create', expect.any(Object)); - expect(callCluster).toHaveBeenCalledWith('indices.updateAliases', { + expect(client.indices.create).toHaveBeenCalledWith(expect.any(Object)); + expect(client.indices.updateAliases).toHaveBeenCalledWith({ body: { actions: [{ add: { alias: '.kibana', index: '.kibana_1' } }] }, }); }); test('removes previous indices from the alias', async () => { - const { callCluster } = testOpts; + const { client } = testOpts; testOpts.documentMigrator.migrationVersion = { dashboard: '2.4.5', }; - withIndex(callCluster, { numOutOfDate: 1 }); + withIndex(client, { numOutOfDate: 1 }); await new 
IndexMigrator(testOpts).migrate(); - expect(callCluster).toHaveBeenCalledWith('indices.create', expect.any(Object)); - expect(callCluster).toHaveBeenCalledWith('indices.updateAliases', { + expect(client.indices.create).toHaveBeenCalledWith(expect.any(Object)); + expect(client.indices.updateAliases).toHaveBeenCalledWith({ body: { actions: [ { remove: { alias: '.kibana', index: '.kibana_1' } }, @@ -306,7 +310,7 @@ describe('IndexMigrator', () => { test('transforms all docs from the original index', async () => { let count = 0; - const { callCluster } = testOpts; + const { client } = testOpts; const migrateDoc = jest.fn((doc: SavedObjectUnsanitizedDoc) => { return { ...doc, @@ -319,7 +323,7 @@ describe('IndexMigrator', () => { migrate: migrateDoc, }; - withIndex(callCluster, { + withIndex(client, { numOutOfDate: 1, docs: [ [{ _id: 'foo:1', _source: { type: 'foo', foo: { name: 'Bar' } } }], @@ -344,30 +348,27 @@ describe('IndexMigrator', () => { migrationVersion: {}, references: [], }); - const bulkCalls = callCluster.mock.calls.filter(([action]: any) => action === 'bulk'); - expect(bulkCalls.length).toEqual(2); - expect(bulkCalls[0]).toEqual([ - 'bulk', - { - body: [ - { index: { _id: 'foo:1', _index: '.kibana_2' } }, - { foo: { name: 1 }, type: 'foo', migrationVersion: {}, references: [] }, - ], - }, - ]); - expect(bulkCalls[1]).toEqual([ - 'bulk', - { - body: [ - { index: { _id: 'foo:2', _index: '.kibana_2' } }, - { foo: { name: 2 }, type: 'foo', migrationVersion: {}, references: [] }, - ], - }, - ]); + + expect(client.bulk).toHaveBeenCalledTimes(2); + expect(client.bulk).toHaveBeenNthCalledWith(1, { + body: [ + { index: { _id: 'foo:1', _index: '.kibana_2' } }, + { foo: { name: 1 }, type: 'foo', migrationVersion: {}, references: [] }, + ], + }); + expect(client.bulk).toHaveBeenNthCalledWith(2, { + body: [ + { index: { _id: 'foo:2', _index: '.kibana_2' } }, + { foo: { name: 2 }, type: 'foo', migrationVersion: {}, references: [] }, + ], + }); }); }); -function withIndex(callCluster: jest.Mock, opts: any = {}) { +function withIndex( + client: ReturnType, + opts: any = {} +) { const defaultIndex = { '.kibana_1': { aliases: { '.kibana': {} }, @@ -386,39 +387,56 @@ function withIndex(callCluster: jest.Mock, opts: any = {}) { const { alias = defaultAlias } = opts; const { index = defaultIndex } = opts; const { docs = [] } = opts; - const searchResult = (i: number) => - Promise.resolve({ - _scroll_id: i, - _shards: { - successful: 1, - total: 1, - }, - hits: { - hits: docs[i] || [], - }, - }); + const searchResult = (i: number) => ({ + _scroll_id: i, + _shards: { + successful: 1, + total: 1, + }, + hits: { + hits: docs[i] || [], + }, + }); let scrollCallCounter = 1; - callCluster.mockImplementation((method) => { - if (method === 'indices.get') { - return Promise.resolve(index); - } else if (method === 'indices.getAlias') { - return Promise.resolve(alias); - } else if (method === 'reindex') { - return Promise.resolve({ task: 'zeid', _shards: { successful: 1, total: 1 } }); - } else if (method === 'tasks.get') { - return Promise.resolve({ completed: true }); - } else if (method === 'search') { - return searchResult(0); - } else if (method === 'bulk') { - return Promise.resolve({ items: [] }); - } else if (method === 'count') { - return Promise.resolve({ count: numOutOfDate, _shards: { successful: 1, total: 1 } }); - } else if (method === 'scroll' && scrollCallCounter <= docs.length) { + client.indices.get.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise(index, { + 
statusCode: index.statusCode, + }) + ); + client.indices.getAlias.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise(alias, { + statusCode: index.statusCode, + }) + ); + client.reindex.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + task: 'zeid', + _shards: { successful: 1, total: 1 }, + }) + ); + client.tasks.get.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ completed: true }) + ); + client.search.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise(searchResult(0)) + ); + client.bulk.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ items: [] }) + ); + client.count.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + count: numOutOfDate, + _shards: { successful: 1, total: 1 }, + }) + ); + client.scroll.mockImplementation(() => { + if (scrollCallCounter <= docs.length) { const result = searchResult(scrollCallCounter); scrollCallCounter++; - return result; + return elasticsearchClientMock.createSuccessTransportRequestPromise(result); } + return elasticsearchClientMock.createSuccessTransportRequestPromise({}); }); } diff --git a/src/core/server/saved_objects/migrations/core/index_migrator.ts b/src/core/server/saved_objects/migrations/core/index_migrator.ts index e588eb7877322..ceca27fa87723 100644 --- a/src/core/server/saved_objects/migrations/core/index_migrator.ts +++ b/src/core/server/saved_objects/migrations/core/index_migrator.ts @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - import { diffMappings } from './build_active_mappings'; import * as Index from './elastic_index'; import { migrateRawDocs } from './migrate_raw_docs'; @@ -71,11 +70,11 @@ export class IndexMigrator { * Determines what action the migration system needs to take (none, patch, migrate). */ async function requiresMigration(context: Context): Promise { - const { callCluster, alias, documentMigrator, dest, log } = context; + const { client, alias, documentMigrator, dest, log } = context; // Have all of our known migrations been run against the index? const hasMigrations = await Index.migrationsUpToDate( - callCluster, + client, alias, documentMigrator.migrationVersion ); @@ -85,7 +84,7 @@ async function requiresMigration(context: Context): Promise { } // Is our index aliased? 
- const refreshedSource = await Index.fetchInfo(callCluster, alias); + const refreshedSource = await Index.fetchInfo(client, alias); if (!refreshedSource.aliases[alias]) { return true; @@ -109,19 +108,19 @@ async function requiresMigration(context: Context): Promise { */ async function migrateIndex(context: Context): Promise { const startTime = Date.now(); - const { callCluster, alias, source, dest, log } = context; + const { client, alias, source, dest, log } = context; await deleteIndexTemplates(context); log.info(`Creating index ${dest.indexName}.`); - await Index.createIndex(callCluster, dest.indexName, dest.mappings); + await Index.createIndex(client, dest.indexName, dest.mappings); await migrateSourceToDest(context); log.info(`Pointing alias ${alias} to ${dest.indexName}.`); - await Index.claimAlias(callCluster, dest.indexName, alias); + await Index.claimAlias(client, dest.indexName, alias); const result: MigrationResult = { status: 'migrated', @@ -139,12 +138,12 @@ async function migrateIndex(context: Context): Promise { * If the obsoleteIndexTemplatePattern option is specified, this will delete any index templates * that match it. */ -async function deleteIndexTemplates({ callCluster, log, obsoleteIndexTemplatePattern }: Context) { +async function deleteIndexTemplates({ client, log, obsoleteIndexTemplatePattern }: Context) { if (!obsoleteIndexTemplatePattern) { return; } - const templates = await callCluster('cat.templates', { + const { body: templates } = await client.cat.templates>({ format: 'json', name: obsoleteIndexTemplatePattern, }); @@ -157,7 +156,7 @@ async function deleteIndexTemplates({ callCluster, log, obsoleteIndexTemplatePat log.info(`Removing index templates: ${templateNames}`); - return Promise.all(templateNames.map((name) => callCluster('indices.deleteTemplate', { name }))); + return Promise.all(templateNames.map((name) => client.indices.deleteTemplate({ name }))); } /** @@ -166,7 +165,7 @@ async function deleteIndexTemplates({ callCluster, log, obsoleteIndexTemplatePat * a situation where the alias moves out from under us as we're migrating docs. 
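 *
 * The batching below amounts to the following sketch (illustrative only):
 *
 *   const read = Index.reader(client, source.indexName, { batchSize, scrollDuration });
 *   let docs;
 *   while ((docs = await read()).length > 0) {
 *     await Index.write(client, dest.indexName, await migrateRawDocs(serializer, documentMigrator.migrate, docs, log));
 *   }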
*/ async function migrateSourceToDest(context: Context) { - const { callCluster, alias, dest, source, batchSize } = context; + const { client, alias, dest, source, batchSize } = context; const { scrollDuration, documentMigrator, log, serializer } = context; if (!source.exists) { @@ -176,10 +175,10 @@ async function migrateSourceToDest(context: Context) { if (!source.aliases[alias]) { log.info(`Reindexing ${alias} to ${source.indexName}`); - await Index.convertToAlias(callCluster, source, alias, batchSize, context.convertToAliasScript); + await Index.convertToAlias(client, source, alias, batchSize, context.convertToAliasScript); } - const read = Index.reader(callCluster, source.indexName, { batchSize, scrollDuration }); + const read = Index.reader(client, source.indexName, { batchSize, scrollDuration }); log.info(`Migrating ${source.indexName} saved objects to ${dest.indexName}`); @@ -193,7 +192,7 @@ async function migrateSourceToDest(context: Context) { log.debug(`Migrating saved objects ${docs.map((d) => d._id).join(', ')}`); await Index.write( - callCluster, + client, dest.indexName, await migrateRawDocs(serializer, documentMigrator.migrate, docs, log) ); diff --git a/src/core/server/saved_objects/migrations/core/migration_context.ts b/src/core/server/saved_objects/migrations/core/migration_context.ts index adf1851a1aa75..0ea362d65623e 100644 --- a/src/core/server/saved_objects/migrations/core/migration_context.ts +++ b/src/core/server/saved_objects/migrations/core/migration_context.ts @@ -25,6 +25,7 @@ */ import { Logger } from 'src/core/server/logging'; +import { MigrationEsClient } from './migration_es_client'; import { SavedObjectsSerializer } from '../../serialization'; import { SavedObjectsTypeMappingDefinitions, @@ -32,16 +33,15 @@ import { IndexMapping, } from '../../mappings'; import { buildActiveMappings } from './build_active_mappings'; -import { CallCluster } from './call_cluster'; import { VersionedTransformer } from './document_migrator'; -import { fetchInfo, FullIndexInfo } from './elastic_index'; +import * as Index from './elastic_index'; import { SavedObjectsMigrationLogger, MigrationLogger } from './migration_logger'; export interface MigrationOpts { batchSize: number; pollInterval: number; scrollDuration: string; - callCluster: CallCluster; + client: MigrationEsClient; index: string; log: Logger; mappingProperties: SavedObjectsTypeMappingDefinitions; @@ -56,11 +56,14 @@ export interface MigrationOpts { obsoleteIndexTemplatePattern?: string; } +/** + * @internal + */ export interface Context { - callCluster: CallCluster; + client: MigrationEsClient; alias: string; - source: FullIndexInfo; - dest: FullIndexInfo; + source: Index.FullIndexInfo; + dest: Index.FullIndexInfo; documentMigrator: VersionedTransformer; log: SavedObjectsMigrationLogger; batchSize: number; @@ -76,13 +79,13 @@ export interface Context { * and various info needed to migrate the source index. 
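 *
 * Hypothetical call, to show the inputs involved (only fields visible in `MigrationOpts`
 * are listed here; real callers pass the full options object):
 *
 *   const context = await migrationContext({
 *     client, index: '.kibana', log, batchSize: 100, pollInterval: 250, scrollDuration: '15m',
 *     mappingProperties, // ...plus the remaining MigrationOpts fields
 *   });
 *   // context.source and context.dest then describe the existing index and the migration target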
*/ export async function migrationContext(opts: MigrationOpts): Promise { - const { log, callCluster } = opts; + const { log, client } = opts; const alias = opts.index; - const source = createSourceContext(await fetchInfo(callCluster, alias), alias); + const source = createSourceContext(await Index.fetchInfo(client, alias), alias); const dest = createDestContext(source, alias, opts.mappingProperties); return { - callCluster, + client, alias, source, dest, @@ -97,7 +100,7 @@ export async function migrationContext(opts: MigrationOpts): Promise { }; } -function createSourceContext(source: FullIndexInfo, alias: string) { +function createSourceContext(source: Index.FullIndexInfo, alias: string) { if (source.exists && source.indexName === alias) { return { ...source, @@ -109,10 +112,10 @@ function createSourceContext(source: FullIndexInfo, alias: string) { } function createDestContext( - source: FullIndexInfo, + source: Index.FullIndexInfo, alias: string, typeMappingDefinitions: SavedObjectsTypeMappingDefinitions -): FullIndexInfo { +): Index.FullIndexInfo { const targetMappings = disableUnknownTypeMappingFields( buildActiveMappings(typeMappingDefinitions), source.mappings diff --git a/src/core/server/saved_objects/migrations/core/migration_es_client.test.mock.ts b/src/core/server/saved_objects/migrations/core/migration_es_client.test.mock.ts new file mode 100644 index 0000000000000..8ebed25d87cba --- /dev/null +++ b/src/core/server/saved_objects/migrations/core/migration_es_client.test.mock.ts @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +export const migrationRetryCallClusterMock = jest.fn((fn) => fn()); +jest.doMock('../../../elasticsearch/client/retry_call_cluster', () => ({ + migrationRetryCallCluster: migrationRetryCallClusterMock, +})); diff --git a/src/core/server/saved_objects/migrations/core/migration_es_client.test.ts b/src/core/server/saved_objects/migrations/core/migration_es_client.test.ts new file mode 100644 index 0000000000000..40c06677c4a5a --- /dev/null +++ b/src/core/server/saved_objects/migrations/core/migration_es_client.test.ts @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { migrationRetryCallClusterMock } from './migration_es_client.test.mock';
+
+import { createMigrationEsClient, MigrationEsClient } from './migration_es_client';
+import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks';
+import { loggerMock } from '../../../logging/logger.mock';
+import { SavedObjectsErrorHelpers } from '../../service/lib/errors';
+
+describe('MigrationEsClient', () => {
+  let client: ReturnType<typeof elasticsearchClientMock.createElasticSearchClient>;
+  let migrationEsClient: MigrationEsClient;
+
+  beforeEach(() => {
+    client = elasticsearchClientMock.createElasticSearchClient();
+    migrationEsClient = createMigrationEsClient(client, loggerMock.create());
+    migrationRetryCallClusterMock.mockClear();
+  });
+
+  it('delegates call to ES client method', async () => {
+    expect(migrationEsClient.bulk).toStrictEqual(expect.any(Function));
+    await migrationEsClient.bulk({ body: [] });
+    expect(client.bulk).toHaveBeenCalledTimes(1);
+  });
+
+  it('wraps a method call in migrationRetryCallClusterMock', async () => {
+    await migrationEsClient.bulk({ body: [] });
+    expect(migrationRetryCallClusterMock).toHaveBeenCalledTimes(1);
+  });
+
+  it('sets maxRetries: 0 to delegate retry logic to migrationRetryCallCluster', async () => {
+    expect(migrationEsClient.bulk).toStrictEqual(expect.any(Function));
+    await migrationEsClient.bulk({ body: [] });
+    expect(client.bulk).toHaveBeenCalledWith(
+      expect.any(Object),
+      expect.objectContaining({ maxRetries: 0 })
+    );
+  });
+
+  it('does not transform elasticsearch errors into saved objects errors', async () => {
+    expect.assertions(1);
+    client.bulk = jest.fn().mockRejectedValue(new Error('reason'));
+    try {
+      await migrationEsClient.bulk({ body: [] });
+    } catch (e) {
+      expect(SavedObjectsErrorHelpers.isSavedObjectsClientError(e)).toBe(false);
+    }
+  });
+});
diff --git a/src/core/server/saved_objects/migrations/core/migration_es_client.ts b/src/core/server/saved_objects/migrations/core/migration_es_client.ts
new file mode 100644
index 0000000000000..ff859057f8fe8
--- /dev/null
+++ b/src/core/server/saved_objects/migrations/core/migration_es_client.ts
@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ +import type { TransportRequestOptions } from '@elastic/elasticsearch/lib/Transport'; +import { get } from 'lodash'; +import { set } from '@elastic/safer-lodash-set'; + +import { ElasticsearchClient } from '../../../elasticsearch'; +import { migrationRetryCallCluster } from '../../../elasticsearch/client/retry_call_cluster'; +import { Logger } from '../../../logging'; + +const methods = [ + 'bulk', + 'cat.templates', + 'clearScroll', + 'count', + 'indices.create', + 'indices.delete', + 'indices.deleteTemplate', + 'indices.get', + 'indices.getAlias', + 'indices.refresh', + 'indices.updateAliases', + 'reindex', + 'search', + 'scroll', + 'tasks.get', +] as const; + +type MethodName = typeof methods[number]; + +export interface MigrationEsClient { + bulk: ElasticsearchClient['bulk']; + cat: { + templates: ElasticsearchClient['cat']['templates']; + }; + clearScroll: ElasticsearchClient['clearScroll']; + count: ElasticsearchClient['count']; + indices: { + create: ElasticsearchClient['indices']['create']; + delete: ElasticsearchClient['indices']['delete']; + deleteTemplate: ElasticsearchClient['indices']['deleteTemplate']; + get: ElasticsearchClient['indices']['get']; + getAlias: ElasticsearchClient['indices']['getAlias']; + refresh: ElasticsearchClient['indices']['refresh']; + updateAliases: ElasticsearchClient['indices']['updateAliases']; + }; + reindex: ElasticsearchClient['reindex']; + search: ElasticsearchClient['search']; + scroll: ElasticsearchClient['scroll']; + tasks: { + get: ElasticsearchClient['tasks']['get']; + }; +} + +export function createMigrationEsClient( + client: ElasticsearchClient, + log: Logger, + delay?: number +): MigrationEsClient { + return methods.reduce((acc: MigrationEsClient, key: MethodName) => { + set(acc, key, async (params?: unknown, options?: TransportRequestOptions) => { + const fn = get(client, key); + if (!fn) { + throw new Error(`unknown ElasticsearchClient client method [${key}]`); + } + return await migrationRetryCallCluster( + () => fn(params, { maxRetries: 0, ...options }), + log, + delay + ); + }); + return acc; + }, {} as MigrationEsClient); +} diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts index 01b0d1cd0ba3a..c3ed97a89af80 100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts @@ -18,6 +18,7 @@ */ import { take } from 'rxjs/operators'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; import { KibanaMigratorOptions, KibanaMigrator } from './kibana_migrator'; import { loggingSystemMock } from '../../../logging/logging_system.mock'; import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry'; @@ -66,26 +67,44 @@ describe('KibanaMigrator', () => { describe('runMigrations', () => { it('only runs migrations once if called multiple times', async () => { const options = mockOptions(); - const clusterStub = jest.fn(() => ({ status: 404 })); - options.callCluster = clusterStub; + options.client.cat.templates.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise( + { templates: [] }, + { statusCode: 404 } + ) + ); + options.client.indices.get.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); + options.client.indices.getAlias.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { 
statusCode: 404 }) + ); + const migrator = new KibanaMigrator(options); + await migrator.runMigrations(); await migrator.runMigrations(); - // callCluster with "cat.templates" is called by "deleteIndexTemplates" function - // and should only be done once - const callClusterCommands = clusterStub.mock.calls - .map(([callClusterPath]) => callClusterPath) - .filter((callClusterPath) => callClusterPath === 'cat.templates'); - expect(callClusterCommands.length).toBe(1); + expect(options.client.cat.templates).toHaveBeenCalledTimes(1); }); it('emits results on getMigratorResult$()', async () => { const options = mockOptions(); - const clusterStub = jest.fn(() => ({ status: 404 })); - options.callCluster = clusterStub; + options.client.cat.templates.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise( + { templates: [] }, + { statusCode: 404 } + ) + ); + options.client.indices.get.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); + options.client.indices.getAlias.mockReturnValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); + const migrator = new KibanaMigrator(options); const migratorStatus = migrator.getStatus$().pipe(take(3)).toPromise(); await migrator.runMigrations(); @@ -107,9 +126,12 @@ describe('KibanaMigrator', () => { }); }); -function mockOptions(): KibanaMigratorOptions { - const callCluster = jest.fn(); - return { +type MockedOptions = KibanaMigratorOptions & { + client: ReturnType; +}; + +const mockOptions = () => { + const options: MockedOptions = { logger: loggingSystemMock.create().get(), kibanaVersion: '8.2.3', savedObjectValidations: {}, @@ -148,6 +170,7 @@ function mockOptions(): KibanaMigratorOptions { scrollDuration: '10m', skip: false, }, - callCluster, + client: elasticsearchClientMock.createElasticSearchClient(), }; -} + return options; +}; diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts index 69b57a498936e..85b9099308807 100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts @@ -24,25 +24,21 @@ import { KibanaConfigType } from 'src/core/server/kibana_config'; import { BehaviorSubject } from 'rxjs'; + import { Logger } from '../../../logging'; import { IndexMapping, SavedObjectsTypeMappingDefinitions } from '../../mappings'; import { SavedObjectUnsanitizedDoc, SavedObjectsSerializer } from '../../serialization'; import { docValidator, PropertyValidators } from '../../validation'; -import { - buildActiveMappings, - CallCluster, - IndexMigrator, - MigrationResult, - MigrationStatus, -} from '../core'; +import { buildActiveMappings, IndexMigrator, MigrationResult, MigrationStatus } from '../core'; import { DocumentMigrator, VersionedTransformer } from '../core/document_migrator'; +import { MigrationEsClient } from '../core/'; import { createIndexMap } from '../core/build_index_map'; import { SavedObjectsMigrationConfigType } from '../../saved_objects_config'; import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry'; import { SavedObjectsType } from '../../types'; export interface KibanaMigratorOptions { - callCluster: CallCluster; + client: MigrationEsClient; typeRegistry: ISavedObjectTypeRegistry; savedObjectsConfig: SavedObjectsMigrationConfigType; kibanaConfig: KibanaConfigType; @@ -62,7 +58,7 @@ export interface 
KibanaMigratorStatus { * Manages the shape of mappings and documents in the Kibana index. */ export class KibanaMigrator { - private readonly callCluster: CallCluster; + private readonly client: MigrationEsClient; private readonly savedObjectsConfig: SavedObjectsMigrationConfigType; private readonly documentMigrator: VersionedTransformer; private readonly kibanaConfig: KibanaConfigType; @@ -80,7 +76,7 @@ export class KibanaMigrator { * Creates an instance of KibanaMigrator. */ constructor({ - callCluster, + client, typeRegistry, kibanaConfig, savedObjectsConfig, @@ -88,7 +84,7 @@ export class KibanaMigrator { kibanaVersion, logger, }: KibanaMigratorOptions) { - this.callCluster = callCluster; + this.client = client; this.kibanaConfig = kibanaConfig; this.savedObjectsConfig = savedObjectsConfig; this.typeRegistry = typeRegistry; @@ -153,7 +149,7 @@ export class KibanaMigrator { const migrators = Object.keys(indexMap).map((index) => { return new IndexMigrator({ batchSize: this.savedObjectsConfig.batchSize, - callCluster: this.callCluster, + client: this.client, documentMigrator: this.documentMigrator, index, log: this.log, diff --git a/src/core/server/saved_objects/saved_objects_service.test.ts b/src/core/server/saved_objects/saved_objects_service.test.ts index e8b2cf0b583b1..8df6a07318c45 100644 --- a/src/core/server/saved_objects/saved_objects_service.test.ts +++ b/src/core/server/saved_objects/saved_objects_service.test.ts @@ -25,18 +25,20 @@ import { } from './saved_objects_service.test.mocks'; import { BehaviorSubject } from 'rxjs'; import { ByteSizeValue } from '@kbn/config-schema'; +import { errors as esErrors } from '@elastic/elasticsearch'; + import { SavedObjectsService } from './saved_objects_service'; import { mockCoreContext } from '../core_context.mock'; -import * as legacyElasticsearch from 'elasticsearch'; import { Env } from '../config'; import { configServiceMock } from '../mocks'; import { elasticsearchServiceMock } from '../elasticsearch/elasticsearch_service.mock'; +import { elasticsearchClientMock } from '../elasticsearch/client/mocks'; import { legacyServiceMock } from '../legacy/legacy_service.mock'; import { httpServiceMock } from '../http/http_service.mock'; +import { httpServerMock } from '../http/http_server.mocks'; import { SavedObjectsClientFactoryProvider } from './service/lib'; import { NodesVersionCompatibility } from '../elasticsearch/version_check/ensure_es_version'; import { SavedObjectsRepository } from './service/lib/repository'; -import { KibanaRequest } from '../http'; jest.mock('./service/lib/repository'); @@ -70,7 +72,7 @@ describe('SavedObjectsService', () => { const createStartDeps = (pluginsInitialized: boolean = true) => { return { pluginsInitialized, - elasticsearch: elasticsearchServiceMock.createStart(), + elasticsearch: elasticsearchServiceMock.createInternalStart(), }; }; @@ -161,26 +163,27 @@ describe('SavedObjectsService', () => { }); describe('#start()', () => { - it('creates a KibanaMigrator which retries NoConnections errors from callAsInternalUser', async () => { + it('creates a KibanaMigrator which retries NoLivingConnectionsError errors from ES client', async () => { const coreContext = createCoreContext(); const soService = new SavedObjectsService(coreContext); const coreSetup = createSetupDeps(); const coreStart = createStartDeps(); - let i = 0; - coreStart.elasticsearch.legacy.client.callAsInternalUser = jest + coreStart.elasticsearch.client.asInternalUser.indices.create = jest .fn() - .mockImplementation(() => - i++ <= 2 - ? 
Promise.reject(new legacyElasticsearch.errors.NoConnections()) - : Promise.resolve('success') + .mockImplementationOnce(() => + Promise.reject(new esErrors.NoLivingConnectionsError('reason', {} as any)) + ) + .mockImplementationOnce(() => + elasticsearchClientMock.createSuccessTransportRequestPromise('success') ); await soService.setup(coreSetup); await soService.start(coreStart, 1); - return expect(KibanaMigratorMock.mock.calls[0][0].callCluster()).resolves.toMatch('success'); + const response = await KibanaMigratorMock.mock.calls[0][0].client.indices.create(); + return expect(response.body).toBe('success'); }); it('skips KibanaMigrator migrations when pluginsInitialized=false', async () => { @@ -291,22 +294,15 @@ describe('SavedObjectsService', () => { const coreStart = createStartDeps(); const { createScopedRepository } = await soService.start(coreStart); - const req = {} as KibanaRequest; + const req = httpServerMock.createKibanaRequest(); createScopedRepository(req); - expect(coreStart.elasticsearch.legacy.client.asScoped).toHaveBeenCalledWith(req); - - const [ - { - value: { callAsCurrentUser }, - }, - ] = coreStart.elasticsearch.legacy.client.asScoped.mock.results; + expect(coreStart.elasticsearch.client.asScoped).toHaveBeenCalledWith(req); const [ - [, , , callCluster, includedHiddenTypes], + [, , , , includedHiddenTypes], ] = (SavedObjectsRepository.createRepository as jest.Mocked).mock.calls; - expect(callCluster).toBe(callAsCurrentUser); expect(includedHiddenTypes).toEqual([]); }); @@ -318,7 +314,7 @@ describe('SavedObjectsService', () => { const coreStart = createStartDeps(); const { createScopedRepository } = await soService.start(coreStart); - const req = {} as KibanaRequest; + const req = httpServerMock.createKibanaRequest(); createScopedRepository(req, ['someHiddenType']); const [ @@ -341,11 +337,10 @@ describe('SavedObjectsService', () => { createInternalRepository(); const [ - [, , , callCluster, includedHiddenTypes], + [, , , client, includedHiddenTypes], ] = (SavedObjectsRepository.createRepository as jest.Mocked).mock.calls; - expect(coreStart.elasticsearch.legacy.client.callAsInternalUser).toBe(callCluster); - expect(callCluster).toBe(coreStart.elasticsearch.legacy.client.callAsInternalUser); + expect(coreStart.elasticsearch.client.asInternalUser).toBe(client); expect(includedHiddenTypes).toEqual([]); }); diff --git a/src/core/server/saved_objects/saved_objects_service.ts b/src/core/server/saved_objects/saved_objects_service.ts index c2d4f49d7ee2a..f05e912b12ad8 100644 --- a/src/core/server/saved_objects/saved_objects_service.ts +++ b/src/core/server/saved_objects/saved_objects_service.ts @@ -30,13 +30,12 @@ import { KibanaMigrator, IKibanaMigrator } from './migrations'; import { CoreContext } from '../core_context'; import { LegacyServiceDiscoverPlugins } from '../legacy'; import { - LegacyAPICaller, - ElasticsearchServiceStart, - ILegacyClusterClient, + ElasticsearchClient, + IClusterClient, InternalElasticsearchServiceSetup, + InternalElasticsearchServiceStart, } from '../elasticsearch'; import { KibanaConfigType } from '../kibana_config'; -import { migrationsRetryCallCluster } from '../elasticsearch/legacy'; import { SavedObjectsConfigType, SavedObjectsMigrationConfigType, @@ -57,7 +56,7 @@ import { SavedObjectsSerializer } from './serialization'; import { registerRoutes } from './routes'; import { ServiceStatus } from '../status'; import { calculateStatus$ } from './status'; - +import { createMigrationEsClient } from './migrations/core/'; /** * Saved Objects is 
Kibana's data persistence mechanism allowing plugins to * use Elasticsearch for storing and querying state. The SavedObjectsServiceSetup API exposes methods @@ -284,7 +283,7 @@ interface WrappedClientFactoryWrapper { /** @internal */ export interface SavedObjectsStartDeps { - elasticsearch: ElasticsearchServiceStart; + elasticsearch: InternalElasticsearchServiceStart; pluginsInitialized?: boolean; } @@ -383,12 +382,12 @@ export class SavedObjectsService .atPath('kibana') .pipe(first()) .toPromise(); - const client = elasticsearch.legacy.client; + const client = elasticsearch.client; const migrator = this.createMigrator( kibanaConfig, this.config.migration, - client, + elasticsearch.client, migrationsRetryDelay ); @@ -434,21 +433,24 @@ export class SavedObjectsService await migrator.runMigrations(); } - const createRepository = (callCluster: LegacyAPICaller, includedHiddenTypes: string[] = []) => { + const createRepository = ( + esClient: ElasticsearchClient, + includedHiddenTypes: string[] = [] + ) => { return SavedObjectsRepository.createRepository( migrator, this.typeRegistry, kibanaConfig.index, - callCluster, + esClient, includedHiddenTypes ); }; const repositoryFactory: SavedObjectsRepositoryFactory = { createInternalRepository: (includedHiddenTypes?: string[]) => - createRepository(client.callAsInternalUser, includedHiddenTypes), + createRepository(client.asInternalUser, includedHiddenTypes), createScopedRepository: (req: KibanaRequest, includedHiddenTypes?: string[]) => - createRepository(client.asScoped(req).callAsCurrentUser, includedHiddenTypes), + createRepository(client.asScoped(req).asCurrentUser, includedHiddenTypes), }; const clientProvider = new SavedObjectsClientProvider({ @@ -484,7 +486,7 @@ export class SavedObjectsService private createMigrator( kibanaConfig: KibanaConfigType, savedObjectsConfig: SavedObjectsMigrationConfigType, - esClient: ILegacyClusterClient, + client: IClusterClient, migrationsRetryDelay?: number ): KibanaMigrator { return new KibanaMigrator({ @@ -494,11 +496,7 @@ export class SavedObjectsService savedObjectsConfig, savedObjectValidations: this.validations, kibanaConfig, - callCluster: migrationsRetryCallCluster( - esClient.callAsInternalUser, - this.logger, - migrationsRetryDelay - ), + client: createMigrationEsClient(client.asInternalUser, this.logger, migrationsRetryDelay), }); } } diff --git a/src/core/server/saved_objects/serialization/index.ts b/src/core/server/saved_objects/serialization/index.ts index f7f4e75704341..812a0770ad988 100644 --- a/src/core/server/saved_objects/serialization/index.ts +++ b/src/core/server/saved_objects/serialization/index.ts @@ -22,5 +22,10 @@ * the raw document format as stored in ElasticSearch. */ -export { SavedObjectUnsanitizedDoc, SavedObjectSanitizedDoc, SavedObjectsRawDoc } from './types'; +export { + SavedObjectUnsanitizedDoc, + SavedObjectSanitizedDoc, + SavedObjectsRawDoc, + SavedObjectsRawDocSource, +} from './types'; export { SavedObjectsSerializer } from './serializer'; diff --git a/src/core/server/saved_objects/service/lib/decorate_es_error.test.ts b/src/core/server/saved_objects/service/lib/decorate_es_error.test.ts index 1fdebd87397eb..623610eebd8d7 100644 --- a/src/core/server/saved_objects/service/lib/decorate_es_error.test.ts +++ b/src/core/server/saved_objects/service/lib/decorate_es_error.test.ts @@ -17,75 +17,93 @@ * under the License. 
*/ -import { errors as esErrors } from 'elasticsearch'; - +import { errors as esErrors } from '@elastic/elasticsearch'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; import { decorateEsError } from './decorate_es_error'; import { SavedObjectsErrorHelpers } from './errors'; describe('savedObjectsClient/decorateEsError', () => { it('always returns the same error it receives', () => { - const error = new Error(); + const error = new esErrors.ResponseError(elasticsearchClientMock.createApiResponse()); expect(decorateEsError(error)).toBe(error); }); - it('makes es.ConnectionFault a SavedObjectsClient/EsUnavailable error', () => { - const error = new esErrors.ConnectionFault(); + it('makes ConnectionError a SavedObjectsClient/EsUnavailable error', () => { + const error = new esErrors.ConnectionError( + 'reason', + elasticsearchClientMock.createApiResponse() + ); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(true); }); - it('makes es.ServiceUnavailable a SavedObjectsClient/EsUnavailable error', () => { - const error = new esErrors.ServiceUnavailable(); + it('makes ServiceUnavailable a SavedObjectsClient/EsUnavailable error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 503 }) + ); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(true); }); - it('makes es.NoConnections a SavedObjectsClient/EsUnavailable error', () => { - const error = new esErrors.NoConnections(); + it('makes NoLivingConnectionsError a SavedObjectsClient/EsUnavailable error', () => { + const error = new esErrors.NoLivingConnectionsError( + 'reason', + elasticsearchClientMock.createApiResponse() + ); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(true); }); - it('makes es.RequestTimeout a SavedObjectsClient/EsUnavailable error', () => { - const error = new esErrors.RequestTimeout(); + it('makes TimeoutError a SavedObjectsClient/EsUnavailable error', () => { + const error = new esErrors.TimeoutError('reason', elasticsearchClientMock.createApiResponse()); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsUnavailableError(error)).toBe(true); }); - it('makes es.Conflict a SavedObjectsClient/Conflict error', () => { - const error = new esErrors.Conflict(); + it('makes Conflict a SavedObjectsClient/Conflict error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 409 }) + ); expect(SavedObjectsErrorHelpers.isConflictError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isConflictError(error)).toBe(true); }); - it('makes es.AuthenticationException a SavedObjectsClient/NotAuthorized error', () => { - const error = new esErrors.AuthenticationException(); + it('makes NotAuthorized a SavedObjectsClient/NotAuthorized error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 401 }) + ); expect(SavedObjectsErrorHelpers.isNotAuthorizedError(error)).toBe(false); 
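// Aside (illustrative sketch, not part of this change): with @elastic/elasticsearch,
// HTTP-level failures all surface as ResponseError and are distinguished only by
// `statusCode`, while transport failures keep dedicated classes (ConnectionError,
// TimeoutError, NoLivingConnectionsError). A hypothetical local helper for these
// tests could therefore build any of the former "named" errors from a status code:
const buildResponseError = (statusCode: number) =>
  new esErrors.ResponseError(elasticsearchClientMock.createApiResponse({ statusCode }));
// e.g. buildResponseError(409) stands in for the legacy esErrors.Conflict.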
expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isNotAuthorizedError(error)).toBe(true); }); - it('makes es.Forbidden a SavedObjectsClient/Forbidden error', () => { - const error = new esErrors.Forbidden(); + it('makes Forbidden a SavedObjectsClient/Forbidden error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 403 }) + ); expect(SavedObjectsErrorHelpers.isForbiddenError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isForbiddenError(error)).toBe(true); }); - it('makes es.RequestEntityTooLarge a SavedObjectsClient/RequestEntityTooLarge error', () => { - const error = new esErrors.RequestEntityTooLarge(); + it('makes RequestEntityTooLarge a SavedObjectsClient/RequestEntityTooLarge error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 413 }) + ); expect(SavedObjectsErrorHelpers.isRequestEntityTooLargeError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isRequestEntityTooLargeError(error)).toBe(true); }); - it('discards es.NotFound errors and returns a generic NotFound error', () => { - const error = new esErrors.NotFound(); + it('discards NotFound errors and returns a generic NotFound error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 404 }) + ); expect(SavedObjectsErrorHelpers.isNotFoundError(error)).toBe(false); const genericError = decorateEsError(error); expect(genericError).not.toBe(error); @@ -93,8 +111,10 @@ describe('savedObjectsClient/decorateEsError', () => { expect(SavedObjectsErrorHelpers.isNotFoundError(genericError)).toBe(true); }); - it('makes es.BadRequest a SavedObjectsClient/BadRequest error', () => { - const error = new esErrors.BadRequest(); + it('makes BadRequest a SavedObjectsClient/BadRequest error', () => { + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 400 }) + ); expect(SavedObjectsErrorHelpers.isBadRequestError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isBadRequestError(error)).toBe(true); @@ -102,10 +122,16 @@ describe('savedObjectsClient/decorateEsError', () => { describe('when es.BadRequest has a reason', () => { it('makes a SavedObjectsClient/esCannotExecuteScriptError error when script context is disabled', () => { - const error = new esErrors.BadRequest(); - (error as Record).body = { - error: { reason: 'cannot execute scripts using [update] context' }, - }; + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ + statusCode: 400, + body: { + error: { + reason: 'cannot execute scripts using [update] context', + }, + }, + }) + ); expect(SavedObjectsErrorHelpers.isEsCannotExecuteScriptError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsCannotExecuteScriptError(error)).toBe(true); @@ -113,10 +139,16 @@ describe('savedObjectsClient/decorateEsError', () => { }); it('makes a SavedObjectsClient/esCannotExecuteScriptError error when inline scripts are disabled', () => { - const error = new esErrors.BadRequest(); - (error as Record).body = { - error: { reason: 'cannot execute [inline] scripts' }, - }; + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ + statusCode: 400, + body: { + error: { + reason: 'cannot execute 
[inline] scripts', + }, + }, + }) + ); expect(SavedObjectsErrorHelpers.isEsCannotExecuteScriptError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isEsCannotExecuteScriptError(error)).toBe(true); @@ -124,8 +156,9 @@ describe('savedObjectsClient/decorateEsError', () => { }); it('makes a SavedObjectsClient/BadRequest error for any other reason', () => { - const error = new esErrors.BadRequest(); - (error as Record).body = { error: { reason: 'some other reason' } }; + const error = new esErrors.ResponseError( + elasticsearchClientMock.createApiResponse({ statusCode: 400 }) + ); expect(SavedObjectsErrorHelpers.isBadRequestError(error)).toBe(false); expect(decorateEsError(error)).toBe(error); expect(SavedObjectsErrorHelpers.isBadRequestError(error)).toBe(true); @@ -133,7 +166,7 @@ describe('savedObjectsClient/decorateEsError', () => { }); it('returns other errors as Boom errors', () => { - const error = new Error(); + const error = new esErrors.ResponseError(elasticsearchClientMock.createApiResponse()); expect(error).not.toHaveProperty('isBoom'); expect(decorateEsError(error)).toBe(error); expect(error).toHaveProperty('isBoom'); diff --git a/src/core/server/saved_objects/service/lib/decorate_es_error.ts b/src/core/server/saved_objects/service/lib/decorate_es_error.ts index 7d1575798c357..cf8a16cdaae6f 100644 --- a/src/core/server/saved_objects/service/lib/decorate_es_error.ts +++ b/src/core/server/saved_objects/service/lib/decorate_es_error.ts @@ -17,65 +17,66 @@ * under the License. */ -import * as legacyElasticsearch from 'elasticsearch'; +import { errors as esErrors } from '@elastic/elasticsearch'; import { get } from 'lodash'; -const { - ConnectionFault, - ServiceUnavailable, - NoConnections, - RequestTimeout, - Conflict, - // @ts-expect-error - 401: NotAuthorized, - // @ts-expect-error - 403: Forbidden, - // @ts-expect-error - 413: RequestEntityTooLarge, - NotFound, - BadRequest, -} = legacyElasticsearch.errors; +const responseErrors = { + isServiceUnavailable: (statusCode: number) => statusCode === 503, + isConflict: (statusCode: number) => statusCode === 409, + isNotAuthorized: (statusCode: number) => statusCode === 401, + isForbidden: (statusCode: number) => statusCode === 403, + isRequestEntityTooLarge: (statusCode: number) => statusCode === 413, + isNotFound: (statusCode: number) => statusCode === 404, + isBadRequest: (statusCode: number) => statusCode === 400, +}; +const { ConnectionError, NoLivingConnectionsError, TimeoutError } = esErrors; const SCRIPT_CONTEXT_DISABLED_REGEX = /(?:cannot execute scripts using \[)([a-z]*)(?:\] context)/; const INLINE_SCRIPTS_DISABLED_MESSAGE = 'cannot execute [inline] scripts'; import { SavedObjectsErrorHelpers } from './errors'; -export function decorateEsError(error: Error) { +type EsErrors = + | esErrors.ConnectionError + | esErrors.NoLivingConnectionsError + | esErrors.TimeoutError + | esErrors.ResponseError; + +export function decorateEsError(error: EsErrors) { if (!(error instanceof Error)) { throw new Error('Expected an instance of Error'); } const { reason } = get(error, 'body.error', { reason: undefined }) as { reason?: string }; if ( - error instanceof ConnectionFault || - error instanceof ServiceUnavailable || - error instanceof NoConnections || - error instanceof RequestTimeout + error instanceof ConnectionError || + error instanceof NoLivingConnectionsError || + error instanceof TimeoutError || + responseErrors.isServiceUnavailable(error.statusCode) ) { return 
SavedObjectsErrorHelpers.decorateEsUnavailableError(error, reason); } - if (error instanceof Conflict) { + if (responseErrors.isConflict(error.statusCode)) { return SavedObjectsErrorHelpers.decorateConflictError(error, reason); } - if (error instanceof NotAuthorized) { + if (responseErrors.isNotAuthorized(error.statusCode)) { return SavedObjectsErrorHelpers.decorateNotAuthorizedError(error, reason); } - if (error instanceof Forbidden) { + if (responseErrors.isForbidden(error.statusCode)) { return SavedObjectsErrorHelpers.decorateForbiddenError(error, reason); } - if (error instanceof RequestEntityTooLarge) { + if (responseErrors.isRequestEntityTooLarge(error.statusCode)) { return SavedObjectsErrorHelpers.decorateRequestEntityTooLargeError(error, reason); } - if (error instanceof NotFound) { + if (responseErrors.isNotFound(error.statusCode)) { return SavedObjectsErrorHelpers.createGenericNotFoundError(); } - if (error instanceof BadRequest) { + if (responseErrors.isBadRequest(error.statusCode)) { if ( SCRIPT_CONTEXT_DISABLED_REGEX.test(reason || '') || reason === INLINE_SCRIPTS_DISABLED_MESSAGE diff --git a/src/core/server/saved_objects/service/lib/repository.test.js b/src/core/server/saved_objects/service/lib/repository.test.js index d563edbe66c9b..b902179b012ff 100644 --- a/src/core/server/saved_objects/service/lib/repository.test.js +++ b/src/core/server/saved_objects/service/lib/repository.test.js @@ -24,6 +24,7 @@ import { SavedObjectsSerializer } from '../../serialization'; import { encodeHitVersion } from '../../version'; import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry'; import { DocumentMigrator } from '../../migrations/core/document_migrator'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; jest.mock('./search_dsl/search_dsl', () => ({ getSearchDsl: jest.fn() })); @@ -40,7 +41,7 @@ const createUnsupportedTypeError = (...args) => SavedObjectsErrorHelpers.createUnsupportedTypeError(...args).output.payload; describe('SavedObjectsRepository', () => { - let callAdminCluster; + let client; let savedObjectsRepository; let migrator; @@ -170,26 +171,11 @@ describe('SavedObjectsRepository', () => { }); const getMockMgetResponse = (objects, namespace) => ({ - status: 200, docs: objects.map((obj) => obj.found === false ? 
obj : getMockGetResponse({ ...obj, namespace }) ), }); - const expectClusterCalls = (...actions) => { - for (let i = 0; i < actions.length; i++) { - expect(callAdminCluster).toHaveBeenNthCalledWith(i + 1, actions[i], expect.any(Object)); - } - expect(callAdminCluster).toHaveBeenCalledTimes(actions.length); - }; - const expectClusterCallArgs = (args, n = 1) => { - expect(callAdminCluster).toHaveBeenNthCalledWith( - n, - expect.any(String), - expect.objectContaining(args) - ); - }; - expect.extend({ toBeDocumentWithoutError(received, type, id) { if (received.type === type && received.id === id && !received.error) { @@ -215,7 +201,7 @@ describe('SavedObjectsRepository', () => { }; beforeEach(() => { - callAdminCluster = jest.fn(); + client = elasticsearchClientMock.createElasticSearchClient(); migrator = { migrateDocument: jest.fn().mockImplementation(documentMigrator.migrate), runMigrations: async () => ({ status: 'skipped' }), @@ -240,7 +226,7 @@ describe('SavedObjectsRepository', () => { savedObjectsRepository = new SavedObjectsRepository({ index: '.kibana-test', mappings, - callCluster: callAdminCluster, + client, migrator, typeRegistry: registry, serializer, @@ -248,7 +234,7 @@ describe('SavedObjectsRepository', () => { }); savedObjectsRepository._getCurrentTime = jest.fn(() => mockTimestamp); - getSearchDslNS.getSearchDsl.mockReset(); + getSearchDslNS.getSearchDsl.mockClear(); }); const mockMigrationVersion = { foo: '2.3.4' }; @@ -274,25 +260,29 @@ describe('SavedObjectsRepository', () => { // mock a document that exists in two namespaces const mockResponse = getMockGetResponse({ type, id }); mockResponse._source.namespaces = [currentNs1, currentNs2]; - callAdminCluster.mockResolvedValueOnce(mockResponse); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); }; const addToNamespacesSuccess = async (type, id, namespaces, options) => { - mockGetResponse(type, id); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ - _id: `${type}:${id}`, - ...mockVersionProps, - result: 'updated', - }); // this._writeToCluster('update', ...) 
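// Aside (illustrative sketch, not part of this change): the mocked client resolves a
// transport-style result, so callers read `body` (and `statusCode`) from it instead of
// the bare payload the legacy callAdminCluster mock resolved. Values are placeholders.
client.get.mockResolvedValueOnce(
  elasticsearchClientMock.createSuccessTransportRequestPromise(
    { _id: 'index-pattern:logstash-*', found: true },
    { statusCode: 200 }
  )
);
// inside an async test:
//   const { body, statusCode } = await client.get({ index: '.kibana-test', id: 'index-pattern:logstash-*' });
//   body.found === true and statusCode === 200, rather than a bare `{ found: true }`.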
+ mockGetResponse(type, id); + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: `${type}:${id}`, + ...mockVersionProps, + result: 'updated', + }) + ); const result = await savedObjectsRepository.addToNamespaces(type, id, namespaces, options); - expect(callAdminCluster).toHaveBeenCalledTimes(2); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use ES get action then update action`, async () => { await addToNamespacesSuccess(type, id, [newNs1, newNs2]); - expectClusterCalls('get', 'update'); }); it(`defaults to the version of the existing document`, async () => { @@ -301,25 +291,28 @@ describe('SavedObjectsRepository', () => { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgs(versionProperties, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining(versionProperties), + expect.anything() + ); }); it(`accepts version`, async () => { await addToNamespacesSuccess(type, id, [newNs1, newNs2], { version: encodeHitVersion({ _seq_no: 100, _primary_term: 200 }), }); - expectClusterCallArgs({ if_seq_no: 100, if_primary_term: 200 }, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ if_seq_no: 100, if_primary_term: 200 }), + expect.anything() + ); }); it(`defaults to a refresh setting of wait_for`, async () => { await addToNamespacesSuccess(type, id, [newNs1, newNs2]); - expectClusterCallArgs({ refresh: 'wait_for' }, 2); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await addToNamespacesSuccess(type, id, [newNs1, newNs2], { refresh }); - expectClusterCallArgs({ refresh }, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ refresh: 'wait_for' }), + expect.anything() + ); }); }); @@ -337,19 +330,19 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectNotFoundError('unknownType', id, [newNs1, newNs2]); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectNotFoundError(HIDDEN_TYPE, id, [newNs1, newNs2]); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is not multi-namespace`, async () => { const test = async (type) => { const message = `${type} doesn't support multiple namespaces`; await expectBadRequestError(type, id, [newNs1, newNs2], message); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test('index-pattern'); await test(NAMESPACE_AGNOSTIC_TYPE); @@ -359,48 +352,43 @@ describe('SavedObjectsRepository', () => { const test = async (namespaces) => { const message = 'namespaces must be a non-empty array of strings'; await expectBadRequestError(type, id, namespaces, message); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test([]); }); it(`throws when ES is unable to find the document during get`, async () => { - callAdminCluster.mockResolvedValue({ found: false }); // this._callCluster('get', ...) 
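// Aside (illustrative sketch, not part of this change): each method on the new client
// is invoked as `method(params, transportOptions)`, which is why the rewritten
// assertions above match the params object with expect.objectContaining(...) and
// accept any second argument:
expect(client.update).toHaveBeenCalledWith(
  expect.objectContaining({ refresh: 'wait_for' }), // request params
  expect.anything() // transport options, e.g. `ignore` or `requestTimeout`
);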
+ client.get.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({ found: false }) + ); await expectNotFoundError(type, id, [newNs1, newNs2]); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during get`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); // this._callCluster('get', ...) + client.get.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(type, id, [newNs1, newNs2]); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when the document exists, but not in this namespace`, async () => { - mockGetResponse(type, id); // this._callCluster('get', ...) + mockGetResponse(type, id); await expectNotFoundError(type, id, [newNs1, newNs2], { namespace: 'some-other-namespace', }); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during update`, async () => { - mockGetResponse(type, id); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ status: 404 }); // this._writeToCluster('update', ...) - await expectNotFoundError(type, id, [newNs1, newNs2]); - expectClusterCalls('get', 'update'); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - let callAdminClusterCount = 0; - migrator.runMigrations = jest.fn(async () => - // runMigrations should resolve before callAdminCluster is initiated - expect(callAdminCluster).toHaveBeenCalledTimes(callAdminClusterCount++) + mockGetResponse(type, id); + client.update.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) ); - await expect(addToNamespacesSuccess(type, id, [newNs1, newNs2])).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveReturnedTimes(2); + await expectNotFoundError(type, id, [newNs1, newNs2]); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); }); @@ -457,17 +445,21 @@ describe('SavedObjectsRepository', () => { objects.filter(({ type, id }) => registry.isMultiNamespace(type) && id); if (multiNamespaceObjects?.length) { const response = getMockMgetResponse(multiNamespaceObjects, options?.namespace); - callAdminCluster.mockResolvedValueOnce(response); // this._callCluster('mget', ...) + client.mget.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); } const response = getMockBulkCreateResponse(objects, options?.namespace); - callAdminCluster.mockResolvedValue(response); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValue( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await savedObjectsRepository.bulkCreate(objects, options); - expect(callAdminCluster).toHaveBeenCalledTimes(multiNamespaceObjects?.length ? 2 : 1); + expect(client.mget).toHaveBeenCalledTimes(multiNamespaceObjects?.length ? 
1 : 0); return result; }; // bulk create calls have two objects for each source -- the action, and the source - const expectClusterCallArgsAction = ( + const expectClientCallArgsAction = ( objects, { method, _index = expect.any(String), getId = () => expect.any(String) } ) => { @@ -476,7 +468,10 @@ describe('SavedObjectsRepository', () => { body.push({ [method]: { _index, _id: getId(type, id) } }); body.push(expect.any(Object)); } - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }; const expectObjArgs = ({ type, attributes, references }, overrides) => [ @@ -498,53 +493,60 @@ describe('SavedObjectsRepository', () => { ...mockTimestampFields, }); - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES bulk action by default`, async () => { await bulkCreateSuccess([obj1, obj2]); - expectClusterCalls('bulk'); + expect(client.bulk).toHaveBeenCalledTimes(1); }); it(`should use the ES mget action before bulk action for any types that are multi-namespace, when overwrite=true`, async () => { const objects = [obj1, { ...obj2, type: MULTI_NAMESPACE_TYPE }]; await bulkCreateSuccess(objects, { overwrite: true }); - expectClusterCalls('mget', 'bulk'); + expect(client.bulk).toHaveBeenCalledTimes(1); + expect(client.mget).toHaveBeenCalledTimes(1); const docs = [expect.objectContaining({ _id: `${MULTI_NAMESPACE_TYPE}:${obj2.id}` })]; - expectClusterCallArgs({ body: { docs } }, 1); + expect(client.mget.mock.calls[0][0].body).toEqual({ docs }); }); it(`should use the ES create method if ID is undefined and overwrite=true`, async () => { const objects = [obj1, obj2].map((obj) => ({ ...obj, id: undefined })); await bulkCreateSuccess(objects, { overwrite: true }); - expectClusterCallArgsAction(objects, { method: 'create' }); + expectClientCallArgsAction(objects, { method: 'create' }); }); it(`should use the ES create method if ID is undefined and overwrite=false`, async () => { const objects = [obj1, obj2].map((obj) => ({ ...obj, id: undefined })); await bulkCreateSuccess(objects); - expectClusterCallArgsAction(objects, { method: 'create' }); + expectClientCallArgsAction(objects, { method: 'create' }); }); it(`should use the ES index method if ID is defined and overwrite=true`, async () => { await bulkCreateSuccess([obj1, obj2], { overwrite: true }); - expectClusterCallArgsAction([obj1, obj2], { method: 'index' }); + expectClientCallArgsAction([obj1, obj2], { method: 'index' }); }); it(`should use the ES create method if ID is defined and overwrite=false`, async () => { await bulkCreateSuccess([obj1, obj2]); - expectClusterCallArgsAction([obj1, obj2], { method: 'create' }); + expectClientCallArgsAction([obj1, obj2], { method: 'create' }); }); it(`formats the ES request`, async () => { await bulkCreateSuccess([obj1, obj2]); const body = [...expectObjArgs(obj1), ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`adds namespace to request body for any types that are single-namespace`, async () => { await bulkCreateSuccess([obj1, obj2], { namespace }); const expected = expect.objectContaining({ namespace }); const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`doesn't add namespace to request body 
for any types that are not single-namespace`, async () => { @@ -555,7 +557,10 @@ describe('SavedObjectsRepository', () => { await bulkCreateSuccess(objects, { namespace }); const expected = expect.not.objectContaining({ namespace: expect.anything() }); const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`adds namespaces to request body for any types that are multi-namespace`, async () => { @@ -565,8 +570,12 @@ describe('SavedObjectsRepository', () => { await bulkCreateSuccess(objects, { namespace, overwrite: true }); const expected = expect.objectContaining({ namespaces }); const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }, 2); - callAdminCluster.mockReset(); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); + client.bulk.mockClear(); + client.mget.mockClear(); }; await test(undefined); await test(namespace); @@ -578,8 +587,11 @@ describe('SavedObjectsRepository', () => { await bulkCreateSuccess(objects, { namespace, overwrite: true }); const expected = expect.not.objectContaining({ namespaces: expect.anything() }); const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); - callAdminCluster.mockReset(); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); + client.bulk.mockClear(); }; await test(undefined); await test(namespace); @@ -587,35 +599,32 @@ describe('SavedObjectsRepository', () => { it(`defaults to a refresh setting of wait_for`, async () => { await bulkCreateSuccess([obj1, obj2]); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await bulkCreateSuccess([obj1, obj2], { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ refresh: 'wait_for' }), + expect.anything() + ); }); it(`should use default index`, async () => { await bulkCreateSuccess([obj1, obj2]); - expectClusterCallArgsAction([obj1, obj2], { method: 'create', _index: '.kibana-test' }); + expectClientCallArgsAction([obj1, obj2], { method: 'create', _index: '.kibana-test' }); }); it(`should use custom index`, async () => { await bulkCreateSuccess([obj1, obj2].map((x) => ({ ...x, type: CUSTOM_INDEX_TYPE }))); - expectClusterCallArgsAction([obj1, obj2], { method: 'create', _index: 'custom' }); + expectClientCallArgsAction([obj1, obj2], { method: 'create', _index: 'custom' }); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { const getId = (type, id) => `${namespace}:${type}:${id}`; await bulkCreateSuccess([obj1, obj2], { namespace }); - expectClusterCallArgsAction([obj1, obj2], { method: 'create', getId }); + expectClientCallArgsAction([obj1, obj2], { method: 'create', getId }); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { const getId = (type, id) => `${type}:${id}`; await bulkCreateSuccess([obj1, obj2]); - expectClusterCallArgsAction([obj1, obj2], { method: 'create', getId }); + expectClientCallArgsAction([obj1, obj2], { method: 'create', getId }); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { @@ -625,7 +634,7 
@@ describe('SavedObjectsRepository', () => { { ...obj2, type: MULTI_NAMESPACE_TYPE }, ]; await bulkCreateSuccess(objects, { namespace }); - expectClusterCallArgsAction(objects, { method: 'create', getId }); + expectClientCallArgsAction(objects, { method: 'create', getId }); }); }); @@ -645,14 +654,19 @@ describe('SavedObjectsRepository', () => { } else { response = getMockBulkCreateResponse([obj1, obj2]); } - callAdminCluster.mockResolvedValue(response); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const objects = [obj1, obj, obj2]; const result = await savedObjectsRepository.bulkCreate(objects); - expectClusterCalls('bulk'); + expect(client.bulk).toHaveBeenCalled(); const objCall = esError ? expectObjArgs(obj) : []; const body = [...expectObjArgs(obj1), ...objCall, ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectedError, expectSuccess(obj2)], }); @@ -682,17 +696,29 @@ describe('SavedObjectsRepository', () => { }, ], }; - callAdminCluster.mockResolvedValueOnce(response1); // this._callCluster('mget', ...) + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response1) + ); const response2 = getMockBulkCreateResponse([obj1, obj2]); - callAdminCluster.mockResolvedValue(response2); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response2) + ); const options = { overwrite: true }; const result = await savedObjectsRepository.bulkCreate([obj1, obj, obj2], options); - expectClusterCalls('mget', 'bulk'); + expect(client.bulk).toHaveBeenCalled(); + expect(client.mget).toHaveBeenCalled(); + const body1 = { docs: [expect.objectContaining({ _id: `${obj.type}:${obj.id}` })] }; - expectClusterCallArgs({ body: body1 }, 1); + expect(client.mget).toHaveBeenCalledWith( + expect.objectContaining({ body: body1 }), + expect.anything() + ); const body2 = [...expectObjArgs(obj1), ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body: body2 }, 2); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body: body2 }), + expect.anything() + ); expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectErrorConflict(obj), expectSuccess(obj2)], }); @@ -721,14 +747,6 @@ describe('SavedObjectsRepository', () => { }); describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect(bulkCreateSuccess([obj1, obj2])).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); - }); - it(`migrates the docs and serializes the migrated docs`, async () => { migrator.migrateDocument.mockImplementation(mockMigrateDocument); await bulkCreateSuccess([obj1, obj2]); @@ -793,9 +811,7 @@ describe('SavedObjectsRepository', () => { }); }); - it(`should return objects in the same order regardless of type`, async () => { - // TODO - }); + it.todo(`should return objects in the same order regardless of type`); it(`handles a mix of successful creates and errors`, async () => { const obj = { @@ -804,9 +820,11 @@ describe('SavedObjectsRepository', () => { }; const objects = [obj1, obj, obj2]; const response = 
getMockBulkCreateResponse([obj1, obj2]); - callAdminCluster.mockResolvedValue(response); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await savedObjectsRepository.bulkCreate(objects); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.bulk).toHaveBeenCalledTimes(1); expect(result).toEqual({ saved_objects: [expectSuccessResult(obj1), expectError(obj), expectSuccessResult(obj2)], }); @@ -817,7 +835,9 @@ describe('SavedObjectsRepository', () => { // we returned raw ID's when an object without an id was created. const namespace = 'myspace'; const response = getMockBulkCreateResponse([obj1, obj2], namespace); - callAdminCluster.mockResolvedValueOnce(response); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); // Bulk create one object with id unspecified, and one with id specified const result = await savedObjectsRepository.bulkCreate([{ ...obj1, id: undefined }, obj2], { @@ -884,69 +904,78 @@ describe('SavedObjectsRepository', () => { ); const bulkGetSuccess = async (objects, options) => { const response = getMockMgetResponse(objects, options?.namespace); - callAdminCluster.mockReturnValue(response); + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await bulkGet(objects, options); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.mget).toHaveBeenCalledTimes(1); return result; }; - const _expectClusterCallArgs = ( + const _expectClientCallArgs = ( objects, { _index = expect.any(String), getId = () => expect.any(String) } ) => { - expectClusterCallArgs({ - body: { - docs: objects.map(({ type, id }) => - expect.objectContaining({ - _index, - _id: getId(type, id), - }) - ), - }, - }); + expect(client.mget).toHaveBeenCalledWith( + expect.objectContaining({ + body: { + docs: objects.map(({ type, id }) => + expect.objectContaining({ + _index, + _id: getId(type, id), + }) + ), + }, + }), + expect.anything() + ); }; - describe('cluster calls', () => { + describe('client calls', () => { it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { const getId = (type, id) => `${namespace}:${type}:${id}`; await bulkGetSuccess([obj1, obj2], { namespace }); - _expectClusterCallArgs([obj1, obj2], { getId }); + _expectClientCallArgs([obj1, obj2], { getId }); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { const getId = (type, id) => `${type}:${id}`; await bulkGetSuccess([obj1, obj2]); - _expectClusterCallArgs([obj1, obj2], { getId }); + _expectClientCallArgs([obj1, obj2], { getId }); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { const getId = (type, id) => `${type}:${id}`; let objects = [obj1, obj2].map((obj) => ({ ...obj, type: NAMESPACE_AGNOSTIC_TYPE })); await bulkGetSuccess(objects, { namespace }); - _expectClusterCallArgs(objects, { getId }); + _expectClientCallArgs(objects, { getId }); - callAdminCluster.mockReset(); + client.mget.mockClear(); objects = [obj1, obj2].map((obj) => ({ ...obj, type: MULTI_NAMESPACE_TYPE })); await bulkGetSuccess(objects, { namespace }); - _expectClusterCallArgs(objects, { getId }); + _expectClientCallArgs(objects, { getId }); }); }); describe('errors', () => { const 
bulkGetErrorInvalidType = async ([obj1, obj, obj2]) => { const response = getMockMgetResponse([obj1, obj2]); - callAdminCluster.mockResolvedValue(response); + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await bulkGet([obj1, obj, obj2]); - expectClusterCalls('mget'); + expect(client.mget).toHaveBeenCalled(); expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectErrorInvalidType(obj), expectSuccess(obj2)], }); }; const bulkGetErrorNotFound = async ([obj1, obj, obj2], options, response) => { - callAdminCluster.mockResolvedValue(response); + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await bulkGet([obj1, obj, obj2], options); - expectClusterCalls('mget'); + expect(client.mget).toHaveBeenCalled(); expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectErrorNotFound(obj), expectSuccess(obj2)], }); @@ -982,16 +1011,6 @@ describe('SavedObjectsRepository', () => { }); }); - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect(bulkGetSuccess([obj1, obj2])).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); - }); - }); - describe('returns', () => { const expectSuccessResult = ({ type, id }, doc) => ({ type, @@ -1007,14 +1026,16 @@ describe('SavedObjectsRepository', () => { it(`returns early for empty objects argument`, async () => { const result = await bulkGet([]); expect(result).toEqual({ saved_objects: [] }); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.mget).not.toHaveBeenCalled(); }); it(`formats the ES response`, async () => { const response = getMockMgetResponse([obj1, obj2]); - callAdminCluster.mockResolvedValue(response); + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await bulkGet([obj1, obj2]); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.mget).toHaveBeenCalledTimes(1); expect(result).toEqual({ saved_objects: [ expectSuccessResult(obj1, response.docs[0]), @@ -1025,10 +1046,12 @@ describe('SavedObjectsRepository', () => { it(`handles a mix of successful gets and errors`, async () => { const response = getMockMgetResponse([obj1, obj2]); - callAdminCluster.mockResolvedValue(response); + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const obj = { type: 'unknownType', id: 'three' }; const result = await bulkGet([obj1, obj, obj2]); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.mget).toHaveBeenCalledTimes(1); expect(result).toEqual({ saved_objects: [ expectSuccessResult(obj1, response.docs[0]), @@ -1081,20 +1104,23 @@ describe('SavedObjectsRepository', () => { const multiNamespaceObjects = objects.filter(({ type }) => registry.isMultiNamespace(type)); if (multiNamespaceObjects?.length) { const response = getMockMgetResponse(multiNamespaceObjects, options?.namespace); - callAdminCluster.mockResolvedValueOnce(response); // this._callCluster('mget', ...) 
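// Aside (illustrative sketch, not part of this change): the removed expectClusterCalls
// helper also asserted the order and total count of ES calls; with one mock per client
// method, relative ordering can still be checked via Jest's invocationCallOrder when a
// test needs it:
const [mgetOrder] = client.mget.mock.invocationCallOrder;
const [bulkOrder] = client.bulk.mock.invocationCallOrder;
expect(mgetOrder).toBeLessThan(bulkOrder); // the preflight mget happens before bulk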
+ client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); } const response = getMockBulkUpdateResponse(objects, options?.namespace); - callAdminCluster.mockResolvedValue(response); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await savedObjectsRepository.bulkUpdate(objects, options); - expect(callAdminCluster).toHaveBeenCalledTimes(multiNamespaceObjects?.length ? 2 : 1); + expect(client.mget).toHaveBeenCalledTimes(multiNamespaceObjects?.length ? 1 : 0); return result; }; // bulk create calls have two objects for each source -- the action, and the source - const expectClusterCallArgsAction = ( + const expectClientCallArgsAction = ( objects, - { method, _index = expect.any(String), getId = () => expect.any(String), overrides }, - n + { method, _index = expect.any(String), getId = () => expect.any(String), overrides } ) => { const body = []; for (const { type, id } of objects) { @@ -1107,7 +1133,10 @@ describe('SavedObjectsRepository', () => { }); body.push(expect.any(Object)); } - expectClusterCallArgs({ body }, n); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }; const expectObjArgs = ({ type, attributes }) => [ @@ -1120,44 +1149,58 @@ describe('SavedObjectsRepository', () => { }, ]; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES bulk action by default`, async () => { await bulkUpdateSuccess([obj1, obj2]); - expectClusterCalls('bulk'); + expect(client.bulk).toHaveBeenCalled(); }); it(`should use the ES mget action before bulk action for any types that are multi-namespace`, async () => { const objects = [obj1, { ...obj2, type: MULTI_NAMESPACE_TYPE }]; await bulkUpdateSuccess(objects); - expectClusterCalls('mget', 'bulk'); + expect(client.bulk).toHaveBeenCalled(); + expect(client.mget).toHaveBeenCalled(); + const docs = [expect.objectContaining({ _id: `${MULTI_NAMESPACE_TYPE}:${obj2.id}` })]; - expectClusterCallArgs({ body: { docs } }, 1); + expect(client.mget).toHaveBeenCalledWith( + expect.objectContaining({ body: { docs } }), + expect.anything() + ); }); it(`formats the ES request`, async () => { await bulkUpdateSuccess([obj1, obj2]); const body = [...expectObjArgs(obj1), ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`formats the ES request for any types that are multi-namespace`, async () => { const _obj2 = { ...obj2, type: MULTI_NAMESPACE_TYPE }; await bulkUpdateSuccess([obj1, _obj2]); const body = [...expectObjArgs(obj1), ...expectObjArgs(_obj2)]; - expectClusterCallArgs({ body }, 2); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`doesnt call Elasticsearch if there are no valid objects to update`, async () => { const objects = [obj1, obj2].map((x) => ({ ...x, type: 'unknownType' })); await savedObjectsRepository.bulkUpdate(objects); - expect(callAdminCluster).toHaveBeenCalledTimes(0); + expect(client.bulk).toHaveBeenCalledTimes(0); }); it(`defaults to no references`, async () => { await bulkUpdateSuccess([obj1, obj2]); const expected = { doc: expect.not.objectContaining({ references: expect.anything() }) }; const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); + 
expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); }); it(`accepts custom references array`, async () => { @@ -1166,8 +1209,11 @@ describe('SavedObjectsRepository', () => { await bulkUpdateSuccess(objects); const expected = { doc: expect.objectContaining({ references }) }; const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); - callAdminCluster.mockReset(); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); + client.bulk.mockClear(); }; await test(references); await test(['string']); @@ -1180,8 +1226,11 @@ describe('SavedObjectsRepository', () => { await bulkUpdateSuccess(objects); const expected = { doc: expect.not.objectContaining({ references: expect.anything() }) }; const body = [expect.any(Object), expected, expect.any(Object), expected]; - expectClusterCallArgs({ body }); - callAdminCluster.mockReset(); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); + client.bulk.mockClear(); }; await test('string'); await test(123); @@ -1191,13 +1240,10 @@ describe('SavedObjectsRepository', () => { it(`defaults to a refresh setting of wait_for`, async () => { await bulkUpdateSuccess([obj1, obj2]); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await bulkUpdateSuccess([obj1, obj2], { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ refresh: 'wait_for' }), + expect.anything() + ); }); it(`defaults to the version of the existing document for multi-namespace types`, async () => { @@ -1211,13 +1257,13 @@ describe('SavedObjectsRepository', () => { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgsAction(objects, { method: 'update', overrides }, 2); + expectClientCallArgsAction(objects, { method: 'update', overrides }); }); it(`defaults to no version for types that are not multi-namespace`, async () => { const objects = [obj1, { ...obj2, type: NAMESPACE_AGNOSTIC_TYPE }]; await bulkUpdateSuccess(objects); - expectClusterCallArgsAction(objects, { method: 'update' }); + expectClientCallArgsAction(objects, { method: 'update' }); }); it(`accepts version`, async () => { @@ -1229,27 +1275,27 @@ describe('SavedObjectsRepository', () => { ]; await bulkUpdateSuccess(objects); const overrides = { if_seq_no: 100, if_primary_term: 200 }; - expectClusterCallArgsAction(objects, { method: 'update', overrides }, 2); + expectClientCallArgsAction(objects, { method: 'update', overrides }, 2); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { const getId = (type, id) => `${namespace}:${type}:${id}`; await bulkUpdateSuccess([obj1, obj2], { namespace }); - expectClusterCallArgsAction([obj1, obj2], { method: 'update', getId }); + expectClientCallArgsAction([obj1, obj2], { method: 'update', getId }); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { const getId = (type, id) => `${type}:${id}`; await bulkUpdateSuccess([obj1, obj2]); - expectClusterCallArgsAction([obj1, obj2], { method: 'update', getId }); + expectClientCallArgsAction([obj1, obj2], { method: 'update', getId }); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { 
const getId = (type, id) => `${type}:${id}`; const objects1 = [{ ...obj1, type: NAMESPACE_AGNOSTIC_TYPE }]; await bulkUpdateSuccess(objects1, { namespace }); - expectClusterCallArgsAction(objects1, { method: 'update', getId }); - callAdminCluster.mockReset(); + expectClientCallArgsAction(objects1, { method: 'update', getId }); + client.bulk.mockClear(); const overrides = { // bulkUpdate uses a preflight `get` request for multi-namespace saved objects, and specifies that version on `update` // we aren't testing for this here, but we need to include Jest assertions so this test doesn't fail @@ -1258,7 +1304,7 @@ describe('SavedObjectsRepository', () => { }; const objects2 = [{ ...obj2, type: MULTI_NAMESPACE_TYPE }]; await bulkUpdateSuccess(objects2, { namespace }); - expectClusterCallArgsAction(objects2, { method: 'update', getId, overrides }, 2); + expectClientCallArgsAction(objects2, { method: 'update', getId, overrides }, 2); }); }); @@ -1274,27 +1320,44 @@ describe('SavedObjectsRepository', () => { if (esError) { mockResponse.items[1].update = { error: esError }; } - callAdminCluster.mockResolvedValue(mockResponse); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); const result = await savedObjectsRepository.bulkUpdate(objects); - expectClusterCalls('bulk'); + expect(client.bulk).toHaveBeenCalled(); const objCall = esError ? expectObjArgs(obj) : []; const body = [...expectObjArgs(obj1), ...objCall, ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body }); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectedError, expectSuccess(obj2)], }); }; const bulkUpdateMultiError = async ([obj1, _obj, obj2], options, mgetResponse) => { - callAdminCluster.mockResolvedValueOnce(mgetResponse); // this._callCluster('mget', ...) + client.mget.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mgetResponse, { + statusCode: mgetResponse.statusCode, + }) + ); + const bulkResponse = getMockBulkUpdateResponse([obj1, obj2], namespace); - callAdminCluster.mockResolvedValue(bulkResponse); // this._writeToCluster('bulk', ...) 
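// Aside (illustrative sketch, not part of this change): the legacy callCluster mock
// signalled a missing index with `{ status: 404 }` inside the resolved payload; the new
// client reports it on the transport response instead, so the fixtures below carry
// `statusCode` and the mocks forward it through the options argument:
client.mget.mockResolvedValueOnce(
  elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 })
);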
+ client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(bulkResponse) + ); const result = await savedObjectsRepository.bulkUpdate([obj1, _obj, obj2], options); - expectClusterCalls('mget', 'bulk'); + expect(client.bulk).toHaveBeenCalled(); + expect(client.mget).toHaveBeenCalled(); const body = [...expectObjArgs(obj1), ...expectObjArgs(obj2)]; - expectClusterCallArgs({ body }, 2); + expect(client.bulk).toHaveBeenCalledWith( + expect.objectContaining({ body }), + expect.anything() + ); + expect(result).toEqual({ saved_objects: [expectSuccess(obj1), expectErrorNotFound(_obj), expectSuccess(obj2)], }); @@ -1318,7 +1381,7 @@ describe('SavedObjectsRepository', () => { it(`returns error when ES is unable to find the index (mget)`, async () => { const _obj = { ...obj, type: MULTI_NAMESPACE_TYPE }; - const mgetResponse = { status: 404 }; + const mgetResponse = { statusCode: 404 }; await bulkUpdateMultiError([obj1, _obj, obj2], { namespace }, mgetResponse); }); @@ -1350,16 +1413,6 @@ describe('SavedObjectsRepository', () => { }); }); - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect(bulkUpdateSuccess([obj1, obj2])).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveReturnedTimes(1); - }); - }); - describe('returns', () => { const expectSuccessResult = ({ type, id, attributes, references, namespaces }) => ({ type, @@ -1393,9 +1446,12 @@ describe('SavedObjectsRepository', () => { }; const objects = [obj1, obj, obj2]; const mockResponse = getMockBulkUpdateResponse(objects); - callAdminCluster.mockResolvedValue(mockResponse); // this._writeToCluster('bulk', ...) + client.bulk.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); + const result = await savedObjectsRepository.bulkUpdate(objects); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.bulk).toHaveBeenCalledTimes(1); expect(result).toEqual({ saved_objects: [expectSuccessResult(obj1), expectError(obj), expectSuccessResult(obj2)], }); @@ -1416,10 +1472,12 @@ describe('SavedObjectsRepository', () => { describe('#create', () => { beforeEach(() => { - callAdminCluster.mockImplementation((method, params) => ({ - _id: params.id, - ...mockVersionProps, - })); + client.create.mockImplementation((params) => + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: params.id, + ...mockVersionProps, + }) + ); }); const type = 'index-pattern'; @@ -1436,52 +1494,49 @@ describe('SavedObjectsRepository', () => { const createSuccess = async (type, attributes, options) => { const result = await savedObjectsRepository.create(type, attributes, options); - expect(callAdminCluster).toHaveBeenCalledTimes( - registry.isMultiNamespace(type) && options.overwrite ? 2 : 1 + expect(client.get).toHaveBeenCalledTimes( + registry.isMultiNamespace(type) && options.overwrite ? 
1 : 0 ); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES create action if ID is undefined and overwrite=true`, async () => { await createSuccess(type, attributes, { overwrite: true }); - expectClusterCalls('create'); + expect(client.create).toHaveBeenCalled(); }); it(`should use the ES create action if ID is undefined and overwrite=false`, async () => { await createSuccess(type, attributes); - expectClusterCalls('create'); + expect(client.create).toHaveBeenCalled(); }); it(`should use the ES index action if ID is defined and overwrite=true`, async () => { await createSuccess(type, attributes, { id, overwrite: true }); - expectClusterCalls('index'); + expect(client.index).toHaveBeenCalled(); }); it(`should use the ES create action if ID is defined and overwrite=false`, async () => { await createSuccess(type, attributes, { id }); - expectClusterCalls('create'); + expect(client.create).toHaveBeenCalled(); }); it(`should use the ES get action then index action if type is multi-namespace, ID is defined, and overwrite=true`, async () => { await createSuccess(MULTI_NAMESPACE_TYPE, attributes, { id, overwrite: true }); - expectClusterCalls('get', 'index'); + expect(client.get).toHaveBeenCalled(); + expect(client.index).toHaveBeenCalled(); }); it(`defaults to empty references array`, async () => { await createSuccess(type, attributes, { id }); - expectClusterCallArgs({ - body: expect.objectContaining({ references: [] }), - }); + expect(client.create.mock.calls[0][0].body.references).toEqual([]); }); it(`accepts custom references array`, async () => { const test = async (references) => { await createSuccess(type, attributes, { id, references }); - expectClusterCallArgs({ - body: expect.objectContaining({ references }), - }); - callAdminCluster.mockReset(); + expect(client.create.mock.calls[0][0].body.references).toEqual(references); + client.create.mockClear(); }; await test(references); await test(['string']); @@ -1491,10 +1546,8 @@ describe('SavedObjectsRepository', () => { it(`doesn't accept custom references if not an array`, async () => { const test = async (references) => { await createSuccess(type, attributes, { id, references }); - expectClusterCallArgs({ - body: expect.not.objectContaining({ references: expect.anything() }), - }); - callAdminCluster.mockReset(); + expect(client.create.mock.calls[0][0].body.references).not.toBeDefined(); + client.create.mockClear(); }; await test('string'); await test(123); @@ -1504,49 +1557,75 @@ describe('SavedObjectsRepository', () => { it(`defaults to a refresh setting of wait_for`, async () => { await createSuccess(type, attributes); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await createSuccess(type, attributes, { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ refresh: 'wait_for' }), + expect.anything() + ); }); it(`should use default index`, async () => { await createSuccess(type, attributes, { id }); - expectClusterCallArgs({ index: '.kibana-test' }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ index: '.kibana-test' }), + expect.anything() + ); }); it(`should use custom index`, async () => { await createSuccess(CUSTOM_INDEX_TYPE, attributes, { id }); - expectClusterCallArgs({ index: 'custom' }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ index: 'custom' }), 
+ expect.anything() + ); }); it(`self-generates an id if none is provided`, async () => { await createSuccess(type, attributes); - expectClusterCallArgs({ - id: expect.objectContaining(/index-pattern:[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}/), - }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ + id: expect.objectContaining(/index-pattern:[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}/), + }), + expect.anything() + ); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { await createSuccess(type, attributes, { id, namespace }); - expectClusterCallArgs({ id: `${namespace}:${type}:${id}` }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${namespace}:${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { await createSuccess(type, attributes, { id }); - expectClusterCallArgs({ id: `${type}:${id}` }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { await createSuccess(NAMESPACE_AGNOSTIC_TYPE, attributes, { id, namespace }); - expectClusterCallArgs({ id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}` }); - callAdminCluster.mockReset(); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}`, + }), + expect.anything() + ); + client.create.mockClear(); await createSuccess(MULTI_NAMESPACE_TYPE, attributes, { id, namespace }); - expectClusterCallArgs({ id: `${MULTI_NAMESPACE_TYPE}:${id}` }); + expect(client.create).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${MULTI_NAMESPACE_TYPE}:${id}`, + }), + expect.anything() + ); }); }); @@ -1555,14 +1634,14 @@ describe('SavedObjectsRepository', () => { await expect(savedObjectsRepository.create('unknownType', attributes)).rejects.toThrowError( createUnsupportedTypeError('unknownType') ); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.create).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expect(savedObjectsRepository.create(HIDDEN_TYPE, attributes)).rejects.toThrowError( createUnsupportedTypeError(HIDDEN_TYPE) ); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.create).not.toHaveBeenCalled(); }); it(`throws when there is a conflict with an existing multi-namespace saved object (get)`, async () => { @@ -1571,7 +1650,9 @@ describe('SavedObjectsRepository', () => { id, namespace: 'bar-namespace', }); - callAdminCluster.mockResolvedValue(response); // this._callCluster('get', ...) 
+ client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); await expect( savedObjectsRepository.create(MULTI_NAMESPACE_TYPE, attributes, { id, @@ -1579,16 +1660,12 @@ describe('SavedObjectsRepository', () => { namespace, }) ).rejects.toThrowError(createConflictError(MULTI_NAMESPACE_TYPE, id)); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalled(); }); - it(`throws when automatic index creation fails`, async () => { - // TODO - }); + it.todo(`throws when automatic index creation fails`); - it(`throws when an unexpected failure occurs`, async () => { - // TODO - }); + it.todo(`throws when an unexpected failure occurs`); }); describe('migration', () => { @@ -1596,14 +1673,6 @@ describe('SavedObjectsRepository', () => { migrator.migrateDocument.mockImplementation(mockMigrateDocument); }); - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect(createSuccess(type, attributes, { id, namespace })).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); - }); - it(`migrates a document and serializes the migrated doc`, async () => { const migrationVersion = mockMigrationVersion; await createSuccess(type, attributes, { id, references, migrationVersion }); @@ -1628,7 +1697,7 @@ describe('SavedObjectsRepository', () => { await createSuccess(NAMESPACE_AGNOSTIC_TYPE, attributes, { id, namespace }); expectMigrationArgs({ namespace: expect.anything() }, false, 1); - callAdminCluster.mockReset(); + client.create.mockClear(); await createSuccess(MULTI_NAMESPACE_TYPE, attributes, { id }); expectMigrationArgs({ namespace: expect.anything() }, false, 2); }); @@ -1647,7 +1716,7 @@ describe('SavedObjectsRepository', () => { await createSuccess(type, attributes, { id }); expectMigrationArgs({ namespaces: expect.anything() }, false, 1); - callAdminCluster.mockReset(); + client.create.mockClear(); await createSuccess(NAMESPACE_AGNOSTIC_TYPE, attributes, { id }); expectMigrationArgs({ namespaces: expect.anything() }, false, 2); }); @@ -1678,33 +1747,43 @@ describe('SavedObjectsRepository', () => { const deleteSuccess = async (type, id, options) => { if (registry.isMultiNamespace(type)) { const mockGetResponse = getMockGetResponse({ type, id, namespace: options?.namespace }); - callAdminCluster.mockResolvedValueOnce(mockGetResponse); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockGetResponse) + ); } - callAdminCluster.mockResolvedValue({ result: 'deleted' }); // this._writeToCluster('delete', ...) + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ result: 'deleted' }) + ); const result = await savedObjectsRepository.delete(type, id, options); - expect(callAdminCluster).toHaveBeenCalledTimes(registry.isMultiNamespace(type) ? 2 : 1); + expect(client.get).toHaveBeenCalledTimes(registry.isMultiNamespace(type) ? 
1 : 0); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES delete action when not using a multi-namespace type`, async () => { await deleteSuccess(type, id); - expectClusterCalls('delete'); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`should use ES get action then delete action when using a multi-namespace type with no namespaces remaining`, async () => { await deleteSuccess(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get', 'delete'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`should use ES get action then update action when using a multi-namespace type with one or more namespaces remaining`, async () => { const mockResponse = getMockGetResponse({ type: MULTI_NAMESPACE_TYPE, id }); mockResponse._source.namespaces = ['default', 'some-other-nameespace']; - callAdminCluster - .mockResolvedValueOnce(mockResponse) // this._callCluster('get', ...) - .mockResolvedValue({ result: 'updated' }); // this._writeToCluster('update', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ result: 'updated' }) + ); + await savedObjectsRepository.delete(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get', 'update'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`includes the version of the existing document when type is multi-namespace`, async () => { @@ -1713,37 +1792,49 @@ describe('SavedObjectsRepository', () => { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgs(versionProperties, 2); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining(versionProperties), + expect.anything() + ); }); it(`defaults to a refresh setting of wait_for`, async () => { await deleteSuccess(type, id); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await deleteSuccess(type, id, { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ refresh: 'wait_for' }), + expect.anything() + ); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { await deleteSuccess(type, id, { namespace }); - expectClusterCallArgs({ id: `${namespace}:${type}:${id}` }); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ id: `${namespace}:${type}:${id}` }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { await deleteSuccess(type, id); - expectClusterCallArgs({ id: `${type}:${id}` }); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ id: `${type}:${id}` }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { await deleteSuccess(NAMESPACE_AGNOSTIC_TYPE, id, { namespace }); - expectClusterCallArgs({ id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}` }); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}` }), + expect.anything() + ); - callAdminCluster.mockReset(); + client.delete.mockClear(); await deleteSuccess(MULTI_NAMESPACE_TYPE, id, { namespace }); - 
expectClusterCallArgs({ id: `${MULTI_NAMESPACE_TYPE}:${id}` }); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ id: `${MULTI_NAMESPACE_TYPE}:${id}` }), + expect.anything() + ); }); }); @@ -1756,73 +1847,82 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectNotFoundError('unknownType', id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectNotFoundError(HIDDEN_TYPE, id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); }); it(`throws when ES is unable to find the document during get`, async () => { - callAdminCluster.mockResolvedValue({ found: false }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ found: false }) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during get`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when the type is multi-namespace and the document exists, but not in this namespace`, async () => { const response = getMockGetResponse({ type: MULTI_NAMESPACE_TYPE, id, namespace }); - callAdminCluster.mockResolvedValue(response); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id, { namespace: 'bar-namespace' }); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during update`, async () => { const mockResponse = getMockGetResponse({ type: MULTI_NAMESPACE_TYPE, id }); mockResponse._source.namespaces = ['default', 'some-other-nameespace']; - callAdminCluster - .mockResolvedValueOnce(mockResponse) // this._callCluster('get', ...) - .mockResolvedValue({ status: 404 }); // this._writeToCluster('update', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); + await expectNotFoundError(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get', 'update'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during delete`, async () => { - callAdminCluster.mockResolvedValue({ result: 'not_found' }); // this._writeToCluster('delete', ...) + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ result: 'not_found' }) + ); await expectNotFoundError(type, id); - expectClusterCalls('delete'); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during delete`, async () => { - callAdminCluster.mockResolvedValue({ error: { type: 'index_not_found_exception' } }); // this._writeToCluster('delete', ...) 
+ client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + error: { type: 'index_not_found_exception' }, + }) + ); await expectNotFoundError(type, id); - expectClusterCalls('delete'); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`throws when ES returns an unexpected response`, async () => { - callAdminCluster.mockResolvedValue({ result: 'something unexpected' }); // this._writeToCluster('delete', ...) + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + result: 'something unexpected', + }) + ); await expect(savedObjectsRepository.delete(type, id)).rejects.toThrowError( 'Unexpected Elasticsearch DELETE response' ); - expectClusterCalls('delete'); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - let callAdminClusterCount = 0; - migrator.runMigrations = jest.fn(async () => - // runMigrations should resolve before callAdminCluster is initiated - expect(callAdminCluster).toHaveBeenCalledTimes(callAdminClusterCount++) - ); - await expect(deleteSuccess(type, id)).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); + expect(client.delete).toHaveBeenCalledTimes(1); }); }); @@ -1853,33 +1953,27 @@ describe('SavedObjectsRepository', () => { }; const deleteByNamespaceSuccess = async (namespace, options) => { - callAdminCluster.mockResolvedValue(mockUpdateResults); + client.updateByQuery.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockUpdateResults) + ); const result = await savedObjectsRepository.deleteByNamespace(namespace, options); expect(getSearchDslNS.getSearchDsl).toHaveBeenCalledTimes(1); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.updateByQuery).toHaveBeenCalledTimes(1); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES updateByQuery action`, async () => { await deleteByNamespaceSuccess(namespace); - expectClusterCalls('updateByQuery'); - }); - - it(`defaults to a refresh setting of wait_for`, async () => { - await deleteByNamespaceSuccess(namespace); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await deleteByNamespaceSuccess(namespace, { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.updateByQuery).toHaveBeenCalledTimes(1); }); it(`should use all indices for types that are not namespace-agnostic`, async () => { await deleteByNamespaceSuccess(namespace); - expectClusterCallArgs({ index: ['.kibana-test', 'custom'] }, 1); + expect(client.updateByQuery).toHaveBeenCalledWith( + expect.objectContaining({ index: ['.kibana-test', 'custom'] }), + expect.anything() + ); }); }); @@ -1889,7 +1983,7 @@ describe('SavedObjectsRepository', () => { await expect(savedObjectsRepository.deleteByNamespace(namespace)).rejects.toThrowError( `namespace is required, and must be a string` ); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.updateByQuery).not.toHaveBeenCalled(); }; await test(undefined); await test(['namespace']); @@ -1898,16 +1992,6 @@ describe('SavedObjectsRepository', () => { }); }); - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await 
expect(deleteByNamespaceSuccess(namespace)).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); - }); - }); - describe('returns', () => { it(`returns the query results on success`, async () => { const result = await deleteByNamespaceSuccess(namespace); @@ -2002,64 +2086,90 @@ describe('SavedObjectsRepository', () => { const namespace = 'foo-namespace'; const findSuccess = async (options, namespace) => { - callAdminCluster.mockResolvedValue(generateSearchResults(namespace)); + client.search.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise( + generateSearchResults(namespace) + ) + ); const result = await savedObjectsRepository.find(options); expect(getSearchDslNS.getSearchDsl).toHaveBeenCalledTimes(1); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.search).toHaveBeenCalledTimes(1); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES search action`, async () => { await findSuccess({ type }); - expectClusterCalls('search'); + expect(client.search).toHaveBeenCalledTimes(1); }); it(`merges output of getSearchDsl into es request body`, async () => { const query = { query: 1, aggregations: 2 }; getSearchDslNS.getSearchDsl.mockReturnValue(query); await findSuccess({ type }); - expectClusterCallArgs({ body: expect.objectContaining({ ...query }) }); + + expect(client.search).toHaveBeenCalledWith( + expect.objectContaining({ + body: expect.objectContaining({ ...query }), + }), + expect.anything() + ); }); it(`accepts per_page/page`, async () => { await findSuccess({ type, perPage: 10, page: 6 }); - expectClusterCallArgs({ - size: 10, - from: 50, - }); + expect(client.search).toHaveBeenCalledWith( + expect.objectContaining({ + size: 10, + from: 50, + }), + expect.anything() + ); }); it(`accepts preference`, async () => { await findSuccess({ type, preference: 'pref' }); - expectClusterCallArgs({ preference: 'pref' }); + expect(client.search).toHaveBeenCalledWith( + expect.objectContaining({ + preference: 'pref', + }), + expect.anything() + ); }); it(`can filter by fields`, async () => { await findSuccess({ type, fields: ['title'] }); - expectClusterCallArgs({ - _source: [ - `${type}.title`, - 'namespace', - 'namespaces', - 'type', - 'references', - 'migrationVersion', - 'updated_at', - 'title', - ], - }); + expect(client.search).toHaveBeenCalledWith( + expect.objectContaining({ + _source: [ + `${type}.title`, + 'namespace', + 'namespaces', + 'type', + 'references', + 'migrationVersion', + 'updated_at', + 'title', + ], + }), + expect.anything() + ); }); it(`should set rest_total_hits_as_int to true on a request`, async () => { await findSuccess({ type }); - expectClusterCallArgs({ rest_total_hits_as_int: true }); + expect(client.search).toHaveBeenCalledWith( + expect.objectContaining({ + rest_total_hits_as_int: true, + }), + expect.anything() + ); }); - it(`should not make a cluster call when attempting to find only invalid or hidden types`, async () => { + it(`should not make a client call when attempting to find only invalid or hidden types`, async () => { const test = async (types) => { await savedObjectsRepository.find({ type: types }); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.search).not.toHaveBeenCalled(); }; await test('unknownType'); @@ -2073,21 +2183,21 @@ describe('SavedObjectsRepository', () => { await expect(savedObjectsRepository.find({})).rejects.toThrowError( 'options.type must be a string or an array of strings' ); - 
expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.search).not.toHaveBeenCalled(); }); it(`throws when searchFields is defined but not an array`, async () => { await expect( savedObjectsRepository.find({ type, searchFields: 'string' }) ).rejects.toThrowError('options.searchFields must be an array'); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.search).not.toHaveBeenCalled(); }); it(`throws when fields is defined but not an array`, async () => { await expect(savedObjectsRepository.find({ type, fields: 'string' })).rejects.toThrowError( 'options.fields must be an array' ); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.search).not.toHaveBeenCalled(); }); it(`throws when KQL filter syntax is invalid`, async () => { @@ -2113,24 +2223,16 @@ describe('SavedObjectsRepository', () => { --------------------------------^: Bad Request] `); expect(getSearchDslNS.getSearchDsl).not.toHaveBeenCalled(); - expect(callAdminCluster).not.toHaveBeenCalled(); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect(findSuccess({ type })).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); + expect(client.search).not.toHaveBeenCalled(); }); }); describe('returns', () => { it(`formats the ES response when there is no namespace`, async () => { const noNamespaceSearchResults = generateSearchResults(); - callAdminCluster.mockReturnValue(noNamespaceSearchResults); + client.search.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(noNamespaceSearchResults) + ); const count = noNamespaceSearchResults.hits.hits.length; const response = await savedObjectsRepository.find({ type }); @@ -2154,7 +2256,9 @@ describe('SavedObjectsRepository', () => { it(`formats the ES response when there is a namespace`, async () => { const namespacedSearchResults = generateSearchResults(namespace); - callAdminCluster.mockReturnValue(namespacedSearchResults); + client.search.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(namespacedSearchResults) + ); const count = namespacedSearchResults.hits.hits.length; const response = await savedObjectsRepository.find({ type, namespaces: [namespace] }); @@ -2298,35 +2402,57 @@ describe('SavedObjectsRepository', () => { const getSuccess = async (type, id, options) => { const response = getMockGetResponse({ type, id, namespace: options?.namespace }); - callAdminCluster.mockResolvedValue(response); + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); const result = await savedObjectsRepository.get(type, id, options); - expect(callAdminCluster).toHaveBeenCalledTimes(1); + expect(client.get).toHaveBeenCalledTimes(1); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES get action`, async () => { await getSuccess(type, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { await getSuccess(type, id, { namespace }); - expectClusterCallArgs({ id: `${namespace}:${type}:${id}` }); + expect(client.get).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${namespace}:${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't 
prepend namespace to the id when providing no namespace for single-namespace type`, async () => { await getSuccess(type, id); - expectClusterCallArgs({ id: `${type}:${id}` }); + expect(client.get).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { await getSuccess(NAMESPACE_AGNOSTIC_TYPE, id, { namespace }); - expectClusterCallArgs({ id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}` }); + expect(client.get).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}`, + }), + expect.anything() + ); - callAdminCluster.mockReset(); + client.get.mockClear(); await getSuccess(MULTI_NAMESPACE_TYPE, id, { namespace }); - expectClusterCallArgs({ id: `${MULTI_NAMESPACE_TYPE}:${id}` }); + expect(client.get).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${MULTI_NAMESPACE_TYPE}:${id}`, + }), + expect.anything() + ); }); }); @@ -2339,41 +2465,37 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectNotFoundError('unknownType', id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.get).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectNotFoundError(HIDDEN_TYPE, id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.get).not.toHaveBeenCalled(); }); it(`throws when ES is unable to find the document during get`, async () => { - callAdminCluster.mockResolvedValue({ found: false }); + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ found: false }) + ); await expectNotFoundError(type, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during get`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(type, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when type is multi-namespace and the document exists, but not in this namespace`, async () => { const response = getMockGetResponse({ type: MULTI_NAMESPACE_TYPE, id, namespace }); - callAdminCluster.mockResolvedValue(response); - await expectNotFoundError(MULTI_NAMESPACE_TYPE, id, { namespace: 'bar-namespace' }); - expectClusterCalls('get'); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) ); - await expect(getSuccess(type, id)).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); + await expectNotFoundError(MULTI_NAMESPACE_TYPE, id, { namespace: 'bar-namespace' }); + expect(client.get).toHaveBeenCalledTimes(1); }); }); @@ -2419,68 +2541,93 @@ describe('SavedObjectsRepository', () => { const isMultiNamespace = registry.isMultiNamespace(type); if (isMultiNamespace) { const response = getMockGetResponse({ type, id, namespace: options?.namespace }); - callAdminCluster.mockResolvedValueOnce(response); // this._callCluster('get', ...) 
+ client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); } - callAdminCluster.mockImplementation((method, params) => ({ - _id: params.id, - ...mockVersionProps, - _index: '.kibana', - get: { - found: true, - _source: { - type, - ...mockTimestampFields, - [type]: { - [field]: 8468, - defaultIndex: 'logstash-*', + client.update.mockImplementation((params) => + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: params.id, + ...mockVersionProps, + _index: '.kibana', + get: { + found: true, + _source: { + type, + ...mockTimestampFields, + [type]: { + [field]: 8468, + defaultIndex: 'logstash-*', + }, }, }, - }, - })); + }) + ); + const result = await savedObjectsRepository.incrementCounter(type, id, field, options); - expect(callAdminCluster).toHaveBeenCalledTimes(isMultiNamespace ? 2 : 1); + expect(client.get).toHaveBeenCalledTimes(isMultiNamespace ? 1 : 0); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES update action if type is not multi-namespace`, async () => { await incrementCounterSuccess(type, id, field, { namespace }); - expectClusterCalls('update'); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`should use the ES get action then update action if type is multi-namespace, ID is defined, and overwrite=true`, async () => { await incrementCounterSuccess(MULTI_NAMESPACE_TYPE, id, field, { namespace }); - expectClusterCalls('get', 'update'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`defaults to a refresh setting of wait_for`, async () => { await incrementCounterSuccess(type, id, field, { namespace }); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - await incrementCounterSuccess(type, id, field, { namespace, refresh }); - expectClusterCallArgs({ refresh }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + refresh: 'wait_for', + }), + expect.anything() + ); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { await incrementCounterSuccess(type, id, field, { namespace }); - expectClusterCallArgs({ id: `${namespace}:${type}:${id}` }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${namespace}:${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { await incrementCounterSuccess(type, id, field); - expectClusterCallArgs({ id: `${type}:${id}` }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { await incrementCounterSuccess(NAMESPACE_AGNOSTIC_TYPE, id, field, { namespace }); - expectClusterCallArgs({ id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}` }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${NAMESPACE_AGNOSTIC_TYPE}:${id}`, + }), + expect.anything() + ); - callAdminCluster.mockReset(); + client.update.mockClear(); await incrementCounterSuccess(MULTI_NAMESPACE_TYPE, id, field, { namespace }); - expectClusterCallArgs({ id: `${MULTI_NAMESPACE_TYPE}:${id}` }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${MULTI_NAMESPACE_TYPE}:${id}`, + }), + 
expect.anything() + ); }); }); @@ -2496,7 +2643,7 @@ describe('SavedObjectsRepository', () => { await expect( savedObjectsRepository.incrementCounter(type, id, field) ).rejects.toThrowError(`"type" argument must be a string`); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test(null); @@ -2510,7 +2657,7 @@ describe('SavedObjectsRepository', () => { await expect( savedObjectsRepository.incrementCounter(type, id, field) ).rejects.toThrowError(`"counterFieldName" argument must be a string`); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test(null); @@ -2521,12 +2668,12 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectUnsupportedTypeError('unknownType', id, field); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectUnsupportedTypeError(HIDDEN_TYPE, id, field); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when there is a conflict with an existing multi-namespace saved object (get)`, async () => { @@ -2535,11 +2682,13 @@ describe('SavedObjectsRepository', () => { id, namespace: 'bar-namespace', }); - callAdminCluster.mockResolvedValue(response); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); await expect( savedObjectsRepository.incrementCounter(MULTI_NAMESPACE_TYPE, id, field, { namespace }) ).rejects.toThrowError(createConflictError(MULTI_NAMESPACE_TYPE, id)); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); }); @@ -2548,16 +2697,6 @@ describe('SavedObjectsRepository', () => { migrator.migrateDocument.mockImplementation(mockMigrateDocument); }); - it(`waits until migrations are complete before proceeding`, async () => { - migrator.runMigrations = jest.fn(async () => - expect(callAdminCluster).not.toHaveBeenCalled() - ); - await expect( - incrementCounterSuccess(type, id, field, { namespace }) - ).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveBeenCalledTimes(1); - }); - it(`migrates a document and serializes the migrated doc`, async () => { const migrationVersion = mockMigrationVersion; await incrementCounterSuccess(type, id, field, { migrationVersion }); @@ -2572,22 +2711,24 @@ describe('SavedObjectsRepository', () => { describe('returns', () => { it(`formats the ES response`, async () => { - callAdminCluster.mockImplementation((method, params) => ({ - _id: params.id, - ...mockVersionProps, - _index: '.kibana', - get: { - found: true, - _source: { - type: 'config', - ...mockTimestampFields, - config: { - buildNum: 8468, - defaultIndex: 'logstash-*', + client.update.mockImplementation((params) => + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: params.id, + ...mockVersionProps, + _index: '.kibana', + get: { + found: true, + _source: { + type: 'config', + ...mockTimestampFields, + config: { + buildNum: 8468, + defaultIndex: 'logstash-*', + }, }, }, - }, - })); + }) + ); const response = await savedObjectsRepository.incrementCounter( 'config', @@ -2623,7 +2764,9 @@ describe('SavedObjectsRepository', () => { // mock a document that exists in two namespaces const mockResponse = getMockGetResponse({ type, id }); mockResponse._source.namespaces = namespaces; - 
callAdminCluster.mockResolvedValueOnce(mockResponse); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockResponse) + ); }; const deleteFromNamespacesSuccess = async ( @@ -2633,71 +2776,96 @@ describe('SavedObjectsRepository', () => { currentNamespaces, options ) => { - mockGetResponse(type, id, currentNamespaces); // this._callCluster('get', ...) - const isDelete = currentNamespaces.every((namespace) => namespaces.includes(namespace)); - callAdminCluster.mockResolvedValue({ - _id: `${type}:${id}`, - ...mockVersionProps, - result: isDelete ? 'deleted' : 'updated', - }); // this._writeToCluster('delete', ...) *or* this._writeToCluster('update', ...) - const result = await savedObjectsRepository.deleteFromNamespaces( - type, - id, - namespaces, - options + mockGetResponse(type, id, currentNamespaces); + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: `${type}:${id}`, + ...mockVersionProps, + result: 'deleted', + }) ); - expect(callAdminCluster).toHaveBeenCalledTimes(2); - return result; + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: `${type}:${id}`, + ...mockVersionProps, + result: 'updated', + }) + ); + + return await savedObjectsRepository.deleteFromNamespaces(type, id, namespaces, options); }; - describe('cluster calls', () => { + describe('client calls', () => { describe('delete action', () => { const deleteFromNamespacesSuccessDelete = async (expectFn, options, _type = type) => { const test = async (namespaces) => { await deleteFromNamespacesSuccess(_type, id, namespaces, namespaces, options); expectFn(); - callAdminCluster.mockReset(); + client.delete.mockClear(); + client.get.mockClear(); }; await test([namespace1]); await test([namespace1, namespace2]); }; it(`should use ES get action then delete action if the object has no namespaces remaining`, async () => { - const expectFn = () => expectClusterCalls('get', 'delete'); + const expectFn = () => { + expect(client.delete).toHaveBeenCalledTimes(1); + expect(client.get).toHaveBeenCalledTimes(1); + }; await deleteFromNamespacesSuccessDelete(expectFn); }); it(`formats the ES requests`, async () => { const expectFn = () => { - expectClusterCallArgs({ id: `${type}:${id}` }, 1); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + }), + expect.anything() + ); + const versionProperties = { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgs({ id: `${type}:${id}`, ...versionProperties }, 2); + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + ...versionProperties, + }), + expect.anything() + ); }; await deleteFromNamespacesSuccessDelete(expectFn); }); it(`defaults to a refresh setting of wait_for`, async () => { await deleteFromNamespacesSuccessDelete(() => - expectClusterCallArgs({ refresh: 'wait_for' }, 2) + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ + refresh: 'wait_for', + }), + expect.anything() + ) ); }); - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - const expectFn = () => expectClusterCallArgs({ refresh }, 2); - await deleteFromNamespacesSuccessDelete(expectFn, { refresh }); - }); - it(`should use default index`, async () => { - const expectFn = () => expectClusterCallArgs({ index: '.kibana-test' }, 2); + const 
expectFn = () => + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ index: '.kibana-test' }), + expect.anything() + ); await deleteFromNamespacesSuccessDelete(expectFn); }); it(`should use custom index`, async () => { - const expectFn = () => expectClusterCallArgs({ index: 'custom' }, 2); + const expectFn = () => + expect(client.delete).toHaveBeenCalledWith( + expect.objectContaining({ index: 'custom' }), + expect.anything() + ); await deleteFromNamespacesSuccessDelete(expectFn, {}, MULTI_NAMESPACE_CUSTOM_INDEX_TYPE); }); }); @@ -2708,55 +2876,73 @@ describe('SavedObjectsRepository', () => { const currentNamespaces = [namespace1].concat(remaining); await deleteFromNamespacesSuccess(_type, id, [namespace1], currentNamespaces, options); expectFn(); - callAdminCluster.mockReset(); + client.get.mockClear(); + client.update.mockClear(); }; await test([namespace2]); await test([namespace2, namespace3]); }; it(`should use ES get action then update action if the object has one or more namespaces remaining`, async () => { - await deleteFromNamespacesSuccessUpdate(() => expectClusterCalls('get', 'update')); + const expectFn = () => { + expect(client.update).toHaveBeenCalledTimes(1); + expect(client.get).toHaveBeenCalledTimes(1); + }; + await deleteFromNamespacesSuccessUpdate(expectFn); }); it(`formats the ES requests`, async () => { let ctr = 0; const expectFn = () => { - expectClusterCallArgs({ id: `${type}:${id}` }, 1); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: `${type}:${id}`, + }), + expect.anything() + ); const namespaces = ctr++ === 0 ? [namespace2] : [namespace2, namespace3]; const versionProperties = { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgs( - { + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ id: `${type}:${id}`, ...versionProperties, body: { doc: { ...mockTimestampFields, namespaces } }, - }, - 2 + }), + expect.anything() ); }; await deleteFromNamespacesSuccessUpdate(expectFn); }); it(`defaults to a refresh setting of wait_for`, async () => { - const expectFn = () => expectClusterCallArgs({ refresh: 'wait_for' }, 2); + const expectFn = () => + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + refresh: 'wait_for', + }), + expect.anything() + ); await deleteFromNamespacesSuccessUpdate(expectFn); }); - it(`accepts a custom refresh setting`, async () => { - const refresh = 'foo'; - const expectFn = () => expectClusterCallArgs({ refresh }, 2); - await deleteFromNamespacesSuccessUpdate(expectFn, { refresh }); - }); - it(`should use default index`, async () => { - const expectFn = () => expectClusterCallArgs({ index: '.kibana-test' }, 2); + const expectFn = () => + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ index: '.kibana-test' }), + expect.anything() + ); await deleteFromNamespacesSuccessUpdate(expectFn); }); it(`should use custom index`, async () => { - const expectFn = () => expectClusterCallArgs({ index: 'custom' }, 2); + const expectFn = () => + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ index: 'custom' }), + expect.anything() + ); await deleteFromNamespacesSuccessUpdate(expectFn, {}, MULTI_NAMESPACE_CUSTOM_INDEX_TYPE); }); }); @@ -2776,19 +2962,22 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectNotFoundError('unknownType', id, [namespace1, namespace2]); - 
expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectNotFoundError(HIDDEN_TYPE, id, [namespace1, namespace2]); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is not namespace-agnostic`, async () => { const test = async (type) => { const message = `${type} doesn't support multiple namespaces`; await expectBadRequestError(type, id, [namespace1, namespace2], message); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test('index-pattern'); await test(NAMESPACE_AGNOSTIC_TYPE); @@ -2798,71 +2987,78 @@ describe('SavedObjectsRepository', () => { const test = async (namespaces) => { const message = 'namespaces must be a non-empty array of strings'; await expectBadRequestError(type, id, namespaces, message); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.delete).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }; await test([]); }); it(`throws when ES is unable to find the document during get`, async () => { - callAdminCluster.mockResolvedValue({ found: false }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ found: false }) + ); await expectNotFoundError(type, id, [namespace1, namespace2]); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during get`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(type, id, [namespace1, namespace2]); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when the document exists, but not in this namespace`, async () => { - mockGetResponse(type, id, [namespace1]); // this._callCluster('get', ...) + mockGetResponse(type, id, [namespace1]); await expectNotFoundError(type, id, [namespace1], { namespace: 'some-other-namespace' }); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during delete`, async () => { - mockGetResponse(type, id, [namespace1]); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ result: 'not_found' }); // this._writeToCluster('delete', ...) + mockGetResponse(type, id, [namespace1]); + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ result: 'not_found' }) + ); await expectNotFoundError(type, id, [namespace1]); - expectClusterCalls('get', 'delete'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during delete`, async () => { - mockGetResponse(type, id, [namespace1]); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ error: { type: 'index_not_found_exception' } }); // this._writeToCluster('delete', ...) 
+ mockGetResponse(type, id, [namespace1]); + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + error: { type: 'index_not_found_exception' }, + }) + ); await expectNotFoundError(type, id, [namespace1]); - expectClusterCalls('get', 'delete'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`throws when ES returns an unexpected response`, async () => { - mockGetResponse(type, id, [namespace1]); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ result: 'something unexpected' }); // this._writeToCluster('delete', ...) + mockGetResponse(type, id, [namespace1]); + client.delete.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + result: 'something unexpected', + }) + ); await expect( savedObjectsRepository.deleteFromNamespaces(type, id, [namespace1]) ).rejects.toThrowError('Unexpected Elasticsearch DELETE response'); - expectClusterCalls('get', 'delete'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.delete).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during update`, async () => { - mockGetResponse(type, id, [namespace1, namespace2]); // this._callCluster('get', ...) - callAdminCluster.mockResolvedValue({ status: 404 }); // this._writeToCluster('update', ...) - await expectNotFoundError(type, id, [namespace1]); - expectClusterCalls('get', 'update'); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - let callAdminClusterCount = 0; - migrator.runMigrations = jest.fn(async () => - // runMigrations should resolve before callAdminCluster is initiated - expect(callAdminCluster).toHaveBeenCalledTimes(callAdminClusterCount++) + mockGetResponse(type, id, [namespace1, namespace2]); + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) ); - await expect( - deleteFromNamespacesSuccess(type, id, [namespace1], [namespace1]) - ).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveReturnedTimes(2); + await expectNotFoundError(type, id, [namespace1]); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); }); @@ -2871,7 +3067,7 @@ describe('SavedObjectsRepository', () => { const test = async (namespaces) => { const result = await deleteFromNamespacesSuccess(type, id, namespaces, namespaces); expect(result).toEqual({}); - callAdminCluster.mockReset(); + client.delete.mockClear(); }; await test([namespace1]); await test([namespace1, namespace2]); @@ -2887,7 +3083,7 @@ describe('SavedObjectsRepository', () => { currentNamespaces ); expect(result).toEqual({}); - callAdminCluster.mockReset(); + client.delete.mockClear(); }; await test([namespace2]); await test([namespace2, namespace3]); @@ -2918,47 +3114,61 @@ describe('SavedObjectsRepository', () => { const updateSuccess = async (type, id, attributes, options) => { if (registry.isMultiNamespace(type)) { const mockGetResponse = getMockGetResponse({ type, id, namespace: options?.namespace }); - callAdminCluster.mockResolvedValueOnce(mockGetResponse); // this._callCluster('get', ...) 
+ client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(mockGetResponse) + ); } - callAdminCluster.mockResolvedValue({ - _id: `${type}:${id}`, - ...mockVersionProps, - result: 'updated', - // don't need the rest of the source for test purposes, just the namespace and namespaces attributes - get: { - _source: { namespaces: [options?.namespace ?? 'default'], namespace: options?.namespace }, - }, - }); // this._writeToCluster('update', ...) + client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ + _id: `${type}:${id}`, + ...mockVersionProps, + result: 'updated', + // don't need the rest of the source for test purposes, just the namespace and namespaces attributes + get: { + _source: { + namespaces: [options?.namespace ?? 'default'], + namespace: options?.namespace, + }, + }, + }) + ); const result = await savedObjectsRepository.update(type, id, attributes, options); - expect(callAdminCluster).toHaveBeenCalledTimes(registry.isMultiNamespace(type) ? 2 : 1); + expect(client.get).toHaveBeenCalledTimes(registry.isMultiNamespace(type) ? 1 : 0); return result; }; - describe('cluster calls', () => { + describe('client calls', () => { it(`should use the ES get action then update action when type is multi-namespace`, async () => { await updateSuccess(MULTI_NAMESPACE_TYPE, id, attributes); - expectClusterCalls('get', 'update'); + expect(client.get).toHaveBeenCalledTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`should use the ES update action when type is not multi-namespace`, async () => { await updateSuccess(type, id, attributes); - expectClusterCalls('update'); + expect(client.update).toHaveBeenCalledTimes(1); }); it(`defaults to no references array`, async () => { await updateSuccess(type, id, attributes); - expectClusterCallArgs({ - body: { doc: expect.not.objectContaining({ references: expect.anything() }) }, - }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + body: { doc: expect.not.objectContaining({ references: expect.anything() }) }, + }), + expect.anything() + ); }); it(`accepts custom references array`, async () => { const test = async (references) => { await updateSuccess(type, id, attributes, { references }); - expectClusterCallArgs({ - body: { doc: expect.objectContaining({ references }) }, - }); - callAdminCluster.mockReset(); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + body: { doc: expect.objectContaining({ references }) }, + }), + expect.anything() + ); + client.update.mockClear(); }; await test(references); await test(['string']); @@ -2968,10 +3178,13 @@ describe('SavedObjectsRepository', () => { it(`doesn't accept custom references if not an array`, async () => { const test = async (references) => { await updateSuccess(type, id, attributes, { references }); - expectClusterCallArgs({ - body: { doc: expect.not.objectContaining({ references: expect.anything() }) }, - }); - callAdminCluster.mockReset(); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + body: { doc: expect.not.objectContaining({ references: expect.anything() }) }, + }), + expect.anything() + ); + client.update.mockClear(); }; await test('string'); await test(123); @@ -2981,13 +3194,12 @@ describe('SavedObjectsRepository', () => { it(`defaults to a refresh setting of wait_for`, async () => { await updateSuccess(type, id, { foo: 'bar' }); - expectClusterCallArgs({ refresh: 'wait_for' }); - }); - - it(`accepts a custom refresh 
setting`, async () => { - const refresh = 'foo'; - await updateSuccess(type, id, { foo: 'bar' }, { refresh }); - expectClusterCallArgs({ refresh }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + refresh: 'wait_for', + }), + expect.anything() + ); }); it(`defaults to the version of the existing document when type is multi-namespace`, async () => { @@ -2996,47 +3208,70 @@ describe('SavedObjectsRepository', () => { if_seq_no: mockVersionProps._seq_no, if_primary_term: mockVersionProps._primary_term, }; - expectClusterCallArgs(versionProperties, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining(versionProperties), + expect.anything() + ); }); it(`accepts version`, async () => { await updateSuccess(type, id, attributes, { version: encodeHitVersion({ _seq_no: 100, _primary_term: 200 }), }); - expectClusterCallArgs({ if_seq_no: 100, if_primary_term: 200 }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ if_seq_no: 100, if_primary_term: 200 }), + expect.anything() + ); }); it(`prepends namespace to the id when providing namespace for single-namespace type`, async () => { await updateSuccess(type, id, attributes, { namespace }); - expectClusterCallArgs({ id: expect.stringMatching(`${namespace}:${type}:${id}`) }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ id: expect.stringMatching(`${namespace}:${type}:${id}`) }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when providing no namespace for single-namespace type`, async () => { await updateSuccess(type, id, attributes, { references }); - expectClusterCallArgs({ id: expect.stringMatching(`${type}:${id}`) }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ id: expect.stringMatching(`${type}:${id}`) }), + expect.anything() + ); }); it(`doesn't prepend namespace to the id when not using single-namespace type`, async () => { await updateSuccess(NAMESPACE_AGNOSTIC_TYPE, id, attributes, { namespace }); - expectClusterCallArgs({ id: expect.stringMatching(`${NAMESPACE_AGNOSTIC_TYPE}:${id}`) }); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ + id: expect.stringMatching(`${NAMESPACE_AGNOSTIC_TYPE}:${id}`), + }), + expect.anything() + ); - callAdminCluster.mockReset(); + client.update.mockClear(); await updateSuccess(MULTI_NAMESPACE_TYPE, id, attributes, { namespace }); - expectClusterCallArgs({ id: expect.stringMatching(`${MULTI_NAMESPACE_TYPE}:${id}`) }, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ id: expect.stringMatching(`${MULTI_NAMESPACE_TYPE}:${id}`) }), + expect.anything() + ); }); - it(`includes _sourceIncludes when type is multi-namespace`, async () => { + it(`includes _source_includes when type is multi-namespace`, async () => { await updateSuccess(MULTI_NAMESPACE_TYPE, id, attributes); - expectClusterCallArgs({ _sourceIncludes: ['namespace', 'namespaces'] }, 2); + expect(client.update).toHaveBeenCalledWith( + expect.objectContaining({ _source_includes: ['namespace', 'namespaces'] }), + expect.anything() + ); }); - it(`includes _sourceIncludes when type is not multi-namespace`, async () => { + it(`includes _source_includes when type is not multi-namespace`, async () => { await updateSuccess(type, id, attributes); - expect(callAdminCluster).toHaveBeenLastCalledWith( - expect.any(String), + expect(client.update).toHaveBeenLastCalledWith( expect.objectContaining({ - _sourceIncludes: ['namespace', 'namespaces'], - }) + 
_source_includes: ['namespace', 'namespaces'], + }), + expect.anything() ); }); }); @@ -3050,49 +3285,45 @@ describe('SavedObjectsRepository', () => { it(`throws when type is invalid`, async () => { await expectNotFoundError('unknownType', id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when type is hidden`, async () => { await expectNotFoundError(HIDDEN_TYPE, id); - expect(callAdminCluster).not.toHaveBeenCalled(); + expect(client.update).not.toHaveBeenCalled(); }); it(`throws when ES is unable to find the document during get`, async () => { - callAdminCluster.mockResolvedValue({ found: false }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({ found: false }) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the index during get`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when type is multi-namespace and the document exists, but not in this namespace`, async () => { const response = getMockGetResponse({ type: MULTI_NAMESPACE_TYPE, id, namespace }); - callAdminCluster.mockResolvedValue(response); // this._callCluster('get', ...) + client.get.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise(response) + ); await expectNotFoundError(MULTI_NAMESPACE_TYPE, id, { namespace: 'bar-namespace' }); - expectClusterCalls('get'); + expect(client.get).toHaveBeenCalledTimes(1); }); it(`throws when ES is unable to find the document during update`, async () => { - callAdminCluster.mockResolvedValue({ status: 404 }); // this._writeToCluster('update', ...) 
+ client.update.mockResolvedValueOnce( + elasticsearchClientMock.createSuccessTransportRequestPromise({}, { statusCode: 404 }) + ); await expectNotFoundError(type, id); - expectClusterCalls('update'); - }); - }); - - describe('migration', () => { - it(`waits until migrations are complete before proceeding`, async () => { - let callAdminClusterCount = 0; - migrator.runMigrations = jest.fn(async () => - // runMigrations should resolve before callAdminCluster is initiated - expect(callAdminCluster).toHaveBeenCalledTimes(callAdminClusterCount++) - ); - await expect(updateSuccess(type, id, attributes)).resolves.toBeDefined(); - expect(migrator.runMigrations).toHaveReturnedTimes(1); + expect(client.update).toHaveBeenCalledTimes(1); }); }); diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts index 7a5ac9204627c..8b7b1d62c1b7d 100644 --- a/src/core/server/saved_objects/service/lib/repository.ts +++ b/src/core/server/saved_objects/service/lib/repository.ts @@ -19,13 +19,16 @@ import { omit } from 'lodash'; import uuid from 'uuid'; -import { retryCallCluster } from '../../../elasticsearch/legacy'; -import { LegacyAPICaller } from '../../../elasticsearch/'; - +import { + ElasticsearchClient, + DeleteDocumentResponse, + GetResponse, + SearchResponse, +} from '../../../elasticsearch/'; import { getRootPropertiesObjects, IndexMapping } from '../../mappings'; +import { createRepositoryEsClient, RepositoryEsClient } from './repository_es_client'; import { getSearchDsl } from './search_dsl'; import { includedFields } from './included_fields'; -import { decorateEsError } from './decorate_es_error'; import { SavedObjectsErrorHelpers } from './errors'; import { decodeRequestVersion, encodeVersion, encodeHitVersion } from '../../version'; import { KibanaMigrator } from '../../migrations'; @@ -33,6 +36,7 @@ import { SavedObjectsSerializer, SavedObjectSanitizedDoc, SavedObjectsRawDoc, + SavedObjectsRawDocSource, } from '../../serialization'; import { SavedObjectsBulkCreateObject, @@ -74,7 +78,7 @@ const isRight = (either: Either): either is Right => either.tag === 'Right'; export interface SavedObjectsRepositoryOptions { index: string; mappings: IndexMapping; - callCluster: LegacyAPICaller; + client: ElasticsearchClient; typeRegistry: SavedObjectTypeRegistry; serializer: SavedObjectsSerializer; migrator: KibanaMigrator; @@ -95,8 +99,8 @@ export interface SavedObjectsIncrementCounterOptions extends SavedObjectsBaseOpt * @public */ export interface SavedObjectsDeleteByNamespaceOptions extends SavedObjectsBaseOptions { - /** The Elasticsearch Refresh setting for this operation */ - refresh?: MutatingOperationRefreshSetting; + /** The Elasticsearch supports only boolean flag for this operation */ + refresh?: boolean; } const DEFAULT_REFRESH_SETTING = 'wait_for'; @@ -117,7 +121,7 @@ export class SavedObjectsRepository { private _mappings: IndexMapping; private _registry: SavedObjectTypeRegistry; private _allowedTypes: string[]; - private _unwrappedCallCluster: LegacyAPICaller; + private readonly client: RepositoryEsClient; private _serializer: SavedObjectsSerializer; /** @@ -132,7 +136,7 @@ export class SavedObjectsRepository { migrator: KibanaMigrator, typeRegistry: SavedObjectTypeRegistry, indexName: string, - callCluster: LegacyAPICaller, + client: ElasticsearchClient, includedHiddenTypes: string[] = [], injectedConstructor: any = SavedObjectsRepository ): ISavedObjectsRepository { @@ -157,7 +161,7 @@ export class SavedObjectsRepository { 
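
A note on the test refactor above: the legacy `callAdminCluster` stub is replaced by the mocked new Elasticsearch client. A minimal, self-contained sketch of that pattern follows (illustrative only; the mock helpers are the ones used in the tests above, while the index/id values are made up):

```ts
import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks';

it('records calls and returns canned transport responses (illustrative)', async () => {
  const client = elasticsearchClientMock.createElasticSearchClient();

  // Queue a fake successful transport response for the next update() call.
  client.update.mockResolvedValueOnce(
    elasticsearchClientMock.createSuccessTransportRequestPromise({ result: 'updated' })
  );

  const { body } = await client.update({ index: '.kibana', id: 'foo:1', body: { doc: {} } });
  expect(body.result).toBe('updated');

  // Assertions target the request params object passed to the client method.
  expect(client.update).toHaveBeenCalledWith(expect.objectContaining({ index: '.kibana' }));
});
```
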
typeRegistry, serializer, allowedTypes, - callCluster: retryCallCluster(callCluster), + client, }); } @@ -165,7 +169,7 @@ export class SavedObjectsRepository { const { index, mappings, - callCluster, + client, typeRegistry, serializer, migrator, @@ -183,15 +187,11 @@ export class SavedObjectsRepository { this._index = index; this._mappings = mappings; this._registry = typeRegistry; + this.client = createRepositoryEsClient(client); if (allowedTypes.length === 0) { throw new Error('Empty or missing types for saved object repository!'); } this._allowedTypes = allowedTypes; - - this._unwrappedCallCluster = async (...args: Parameters) => { - await migrator.runMigrations(); - return callCluster(...args); - }; this._serializer = serializer; } @@ -254,17 +254,21 @@ export class SavedObjectsRepository { const raw = this._serializer.savedObjectToRaw(migrated as SavedObjectSanitizedDoc); - const method = id && overwrite ? 'index' : 'create'; - const response = await this._writeToCluster(method, { + const requestParams = { id: raw._id, index: this.getIndexForType(type), refresh, body: raw._source, - }); + }; + + const { body } = + id && overwrite + ? await this.client.index(requestParams) + : await this.client.create(requestParams); return this._rawToSavedObject({ ...raw, - ...response, + ...body, }); } @@ -322,12 +326,14 @@ export class SavedObjectsRepository { _source: ['type', 'namespaces'], })); const bulkGetResponse = bulkGetDocs.length - ? await this._callCluster('mget', { - body: { - docs: bulkGetDocs, + ? await this.client.mget( + { + body: { + docs: bulkGetDocs, + }, }, - ignore: [404], - }) + { ignore: [404] } + ) : undefined; let bulkRequestIndexCounter = 0; @@ -341,8 +347,8 @@ export class SavedObjectsRepository { let savedObjectNamespaces; const { esRequestIndex, object, method } = expectedBulkGetResult.value; if (esRequestIndex !== undefined) { - const indexFound = bulkGetResponse.status !== 404; - const actualResult = indexFound ? bulkGetResponse.docs[esRequestIndex] : undefined; + const indexFound = bulkGetResponse?.statusCode !== 404; + const actualResult = indexFound ? bulkGetResponse?.body.docs[esRequestIndex] : undefined; const docFound = indexFound && actualResult.found === true; if (docFound && !this.rawDocExistsInNamespace(actualResult, namespace)) { const { id, type } = object; @@ -395,7 +401,7 @@ export class SavedObjectsRepository { }); const bulkResponse = bulkCreateParams.length - ? await this._writeToCluster('bulk', { + ? 
await this.client.bulk({ refresh, body: bulkCreateParams, }) @@ -409,7 +415,7 @@ export class SavedObjectsRepository { const { requestedId, rawMigratedDoc, esRequestIndex } = expectedResult.value; const { error, ...rawResponse } = Object.values( - bulkResponse.items[esRequestIndex] + bulkResponse?.body.items[esRequestIndex] )[0] as any; if (error) { @@ -466,18 +472,20 @@ export class SavedObjectsRepository { namespaces: remainingNamespaces, }; - const updateResponse = await this._writeToCluster('update', { - id: rawId, - index: this.getIndexForType(type), - ...getExpectedVersionProperties(undefined, preflightResult), - refresh, - ignore: [404], - body: { - doc, + const { statusCode } = await this.client.update( + { + id: rawId, + index: this.getIndexForType(type), + ...getExpectedVersionProperties(undefined, preflightResult), + refresh, + body: { + doc, + }, }, - }); + { ignore: [404] } + ); - if (updateResponse.status === 404) { + if (statusCode === 404) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } @@ -485,22 +493,23 @@ export class SavedObjectsRepository { } } - const deleteResponse = await this._writeToCluster('delete', { - id: rawId, - index: this.getIndexForType(type), - ...getExpectedVersionProperties(undefined, preflightResult), - refresh, - ignore: [404], - }); + const { body, statusCode } = await this.client.delete( + { + id: rawId, + index: this.getIndexForType(type), + ...getExpectedVersionProperties(undefined, preflightResult), + refresh, + }, + { ignore: [404] } + ); - const deleted = deleteResponse.result === 'deleted'; + const deleted = body.result === 'deleted'; if (deleted) { return {}; } - const deleteDocNotFound = deleteResponse.result === 'not_found'; - const deleteIndexNotFound = - deleteResponse.error && deleteResponse.error.type === 'index_not_found_exception'; + const deleteDocNotFound = body.result === 'not_found'; + const deleteIndexNotFound = body.error && body.error.type === 'index_not_found_exception'; if (deleteDocNotFound || deleteIndexNotFound) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); @@ -510,7 +519,7 @@ export class SavedObjectsRepository { `Unexpected Elasticsearch DELETE response: ${JSON.stringify({ type, id, - response: deleteResponse, + response: { body, statusCode }, })}` ); } @@ -529,17 +538,16 @@ export class SavedObjectsRepository { throw new TypeError(`namespace is required, and must be a string`); } - const { refresh = DEFAULT_REFRESH_SETTING } = options; const allTypes = Object.keys(getRootPropertiesObjects(this._mappings)); const typesToUpdate = allTypes.filter((type) => !this._registry.isNamespaceAgnostic(type)); - const updateOptions = { - index: this.getIndicesForTypes(typesToUpdate), - ignore: [404], - refresh, - body: { - script: { - source: ` + const { body } = await this.client.updateByQuery( + { + index: this.getIndicesForTypes(typesToUpdate), + refresh: options.refresh, + body: { + script: { + source: ` if (!ctx._source.containsKey('namespaces')) { ctx.op = "delete"; } else { @@ -549,18 +557,20 @@ export class SavedObjectsRepository { } } `, - lang: 'painless', - params: { namespace: getNamespaceString(namespace) }, + lang: 'painless', + params: { namespace: getNamespaceString(namespace) }, + }, + conflicts: 'proceed', + ...getSearchDsl(this._mappings, this._registry, { + namespaces: namespace ? 
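
A note on the `{ ignore: [404] }` calls introduced above: with the new client these are transport options, so a missing index or missing document resolves with `statusCode: 404` instead of rejecting, which is why the repository now branches on `statusCode`/`body` rather than catching errors. A minimal sketch of that behaviour (the helper below is hypothetical, assuming the v7 `@elastic/elasticsearch` client):

```ts
import { Client } from '@elastic/elasticsearch';

// Hypothetical helper, not part of the patch: treat a missing index and a missing
// document the same way, without try/catch, by ignoring 404s at the transport level.
export async function getSourceIfExists(client: Client, index: string, id: string) {
  const { body, statusCode } = await client.get({ index, id }, { ignore: [404] });
  if (statusCode === 404 || body.found === false) {
    return undefined;
  }
  return body._source;
}
```
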
[namespace] : undefined, + type: typesToUpdate, + }), }, - conflicts: 'proceed', - ...getSearchDsl(this._mappings, this._registry, { - namespaces: namespace ? [namespace] : undefined, - type: typesToUpdate, - }), }, - }; + { ignore: [404] } + ); - return await this._writeToCluster('updateByQuery', updateOptions); + return body; } /** @@ -639,7 +649,6 @@ export class SavedObjectsRepository { size: perPage, from: perPage * (page - 1), _source: includedFields(type, fields), - ignore: [404], rest_total_hits_as_int: true, preference, body: { @@ -658,9 +667,10 @@ export class SavedObjectsRepository { }, }; - const response = await this._callCluster('search', esOptions); - - if (response.status === 404) { + const { body, statusCode } = await this.client.search>(esOptions, { + ignore: [404], + }); + if (statusCode === 404) { // 404 is only possible here if the index is missing, which // we don't want to leak, see "404s from missing index" above return { @@ -674,14 +684,14 @@ export class SavedObjectsRepository { return { page, per_page: perPage, - total: response.hits.total, - saved_objects: response.hits.hits.map( + total: body.hits.total, + saved_objects: body.hits.hits.map( (hit: SavedObjectsRawDoc): SavedObjectsFindResult => ({ ...this._rawToSavedObject(hit), score: (hit as any)._score, }) ), - }; + } as SavedObjectsFindResponse; } /** @@ -742,12 +752,14 @@ export class SavedObjectsRepository { _source: includedFields(type, fields), })); const bulkGetResponse = bulkGetDocs.length - ? await this._callCluster('mget', { - body: { - docs: bulkGetDocs, + ? await this.client.mget( + { + body: { + docs: bulkGetDocs, + }, }, - ignore: [404], - }) + { ignore: [404] } + ) : undefined; return { @@ -757,7 +769,7 @@ export class SavedObjectsRepository { } const { type, id, esRequestIndex } = expectedResult.value; - const doc = bulkGetResponse.docs[esRequestIndex]; + const doc = bulkGetResponse?.body.docs[esRequestIndex]; if (!doc.found || !this.rawDocExistsInNamespace(doc, namespace)) { return ({ @@ -808,24 +820,26 @@ export class SavedObjectsRepository { const { namespace } = options; - const response = await this._callCluster('get', { - id: this._serializer.generateRawId(namespace, type, id), - index: this.getIndexForType(type), - ignore: [404], - }); + const { body, statusCode } = await this.client.get>( + { + id: this._serializer.generateRawId(namespace, type, id), + index: this.getIndexForType(type), + }, + { ignore: [404] } + ); - const docNotFound = response.found === false; - const indexNotFound = response.status === 404; - if (docNotFound || indexNotFound || !this.rawDocExistsInNamespace(response, namespace)) { + const docNotFound = body.found === false; + const indexNotFound = statusCode === 404; + if (docNotFound || indexNotFound || !this.rawDocExistsInNamespace(body, namespace)) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } - const { updated_at: updatedAt } = response._source; + const { updated_at: updatedAt } = body._source; - let namespaces = []; + let namespaces: string[] = []; if (!this._registry.isNamespaceAgnostic(type)) { - namespaces = response._source.namespaces ?? [getNamespaceString(response._source.namespace)]; + namespaces = body._source.namespaces ?? 
[getNamespaceString(body._source.namespace)]; } return { @@ -833,10 +847,10 @@ export class SavedObjectsRepository { type, namespaces, ...(updatedAt && { updated_at: updatedAt }), - version: encodeHitVersion(response), - attributes: response._source[type], - references: response._source.references || [], - migrationVersion: response._source.migrationVersion, + version: encodeHitVersion(body), + attributes: body._source[type], + references: body._source.references || [], + migrationVersion: body._source.migrationVersion, }; } @@ -876,35 +890,37 @@ export class SavedObjectsRepository { ...(Array.isArray(references) && { references }), }; - const updateResponse = await this._writeToCluster('update', { - id: this._serializer.generateRawId(namespace, type, id), - index: this.getIndexForType(type), - ...getExpectedVersionProperties(version, preflightResult), - refresh, - ignore: [404], - body: { - doc, + const { body, statusCode } = await this.client.update( + { + id: this._serializer.generateRawId(namespace, type, id), + index: this.getIndexForType(type), + ...getExpectedVersionProperties(version, preflightResult), + refresh, + + body: { + doc, + }, + _source_includes: ['namespace', 'namespaces'], }, - _sourceIncludes: ['namespace', 'namespaces'], - }); + { ignore: [404] } + ); - if (updateResponse.status === 404) { + if (statusCode === 404) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } let namespaces = []; if (!this._registry.isNamespaceAgnostic(type)) { - namespaces = updateResponse.get._source.namespaces ?? [ - getNamespaceString(updateResponse.get._source.namespace), - ]; + namespaces = body.get._source.namespaces ?? [getNamespaceString(body.get._source.namespace)]; } return { id, type, updated_at: time, - version: encodeHitVersion(updateResponse), + // @ts-expect-error update doesn't have _seq_no, _primary_term as Record / any in LP + version: encodeHitVersion(body), namespaces, references, attributes, @@ -952,18 +968,20 @@ export class SavedObjectsRepository { namespaces: existingNamespaces ? 
unique(existingNamespaces.concat(namespaces)) : namespaces, }; - const updateResponse = await this._writeToCluster('update', { - id: rawId, - index: this.getIndexForType(type), - ...getExpectedVersionProperties(version, preflightResult), - refresh, - ignore: [404], - body: { - doc, + const { statusCode } = await this.client.update( + { + id: rawId, + index: this.getIndexForType(type), + ...getExpectedVersionProperties(version, preflightResult), + refresh, + body: { + doc, + }, }, - }); + { ignore: [404] } + ); - if (updateResponse.status === 404) { + if (statusCode === 404) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } @@ -1015,40 +1033,48 @@ export class SavedObjectsRepository { namespaces: remainingNamespaces, }; - const updateResponse = await this._writeToCluster('update', { - id: rawId, - index: this.getIndexForType(type), - ...getExpectedVersionProperties(undefined, preflightResult), - refresh, - ignore: [404], - body: { - doc, + const { statusCode } = await this.client.update( + { + id: rawId, + index: this.getIndexForType(type), + ...getExpectedVersionProperties(undefined, preflightResult), + refresh, + + body: { + doc, + }, }, - }); + { + ignore: [404], + } + ); - if (updateResponse.status === 404) { + if (statusCode === 404) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } return {}; } else { // if there are no namespaces remaining, delete the saved object - const deleteResponse = await this._writeToCluster('delete', { - id: this._serializer.generateRawId(undefined, type, id), - index: this.getIndexForType(type), - ...getExpectedVersionProperties(undefined, preflightResult), - refresh, - ignore: [404], - }); + const { body, statusCode } = await this.client.delete( + { + id: this._serializer.generateRawId(undefined, type, id), + refresh, + ...getExpectedVersionProperties(undefined, preflightResult), + index: this.getIndexForType(type), + }, + { + ignore: [404], + } + ); - const deleted = deleteResponse.result === 'deleted'; + const deleted = body.result === 'deleted'; if (deleted) { return {}; } - const deleteDocNotFound = deleteResponse.result === 'not_found'; - const deleteIndexNotFound = - deleteResponse.error && deleteResponse.error.type === 'index_not_found_exception'; + const deleteDocNotFound = body.result === 'not_found'; + const deleteIndexNotFound = body.error && body.error.type === 'index_not_found_exception'; if (deleteDocNotFound || deleteIndexNotFound) { // see "404s from missing index" above throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); @@ -1058,7 +1084,7 @@ export class SavedObjectsRepository { `Unexpected Elasticsearch DELETE response: ${JSON.stringify({ type, id, - response: deleteResponse, + response: { body, statusCode }, })}` ); } @@ -1125,12 +1151,16 @@ export class SavedObjectsRepository { _source: ['type', 'namespaces'], })); const bulkGetResponse = bulkGetDocs.length - ? await this._callCluster('mget', { - body: { - docs: bulkGetDocs, + ? await this.client.mget( + { + body: { + docs: bulkGetDocs, + }, }, - ignore: [404], - }) + { + ignore: [404], + } + ) : undefined; let bulkUpdateRequestIndexCounter = 0; @@ -1145,8 +1175,8 @@ export class SavedObjectsRepository { let namespaces; let versionProperties; if (esRequestIndex !== undefined) { - const indexFound = bulkGetResponse.status !== 404; - const actualResult = indexFound ? 
bulkGetResponse.docs[esRequestIndex] : undefined; + const indexFound = bulkGetResponse?.statusCode !== 404; + const actualResult = indexFound ? bulkGetResponse?.body.docs[esRequestIndex] : undefined; const docFound = indexFound && actualResult.found === true; if (!docFound || !this.rawDocExistsInNamespace(actualResult, namespace)) { return { @@ -1194,11 +1224,11 @@ export class SavedObjectsRepository { const { refresh = DEFAULT_REFRESH_SETTING } = options; const bulkUpdateResponse = bulkUpdateParams.length - ? await this._writeToCluster('bulk', { + ? await this.client.bulk({ refresh, body: bulkUpdateParams, }) - : {}; + : undefined; return { saved_objects: expectedBulkUpdateResults.map((expectedResult) => { @@ -1207,7 +1237,7 @@ export class SavedObjectsRepository { } const { type, id, namespaces, documentToSave, esRequestIndex } = expectedResult.value; - const response = bulkUpdateResponse.items[esRequestIndex]; + const response = bulkUpdateResponse?.body.items[esRequestIndex]; const { error, _seq_no: seqNo, _primary_term: primaryTerm } = Object.values( response )[0] as any; @@ -1283,11 +1313,11 @@ export class SavedObjectsRepository { const raw = this._serializer.savedObjectToRaw(migrated as SavedObjectSanitizedDoc); - const response = await this._writeToCluster('update', { + const { body } = await this.client.update({ id: raw._id, index: this.getIndexForType(type), refresh, - _source: true, + _source: 'true', body: { script: { source: ` @@ -1315,28 +1345,13 @@ export class SavedObjectsRepository { id, type, updated_at: time, - references: response.get._source.references, - version: encodeHitVersion(response), - attributes: response.get._source[type], + references: body.get._source.references, + // @ts-expect-error + version: encodeHitVersion(body), + attributes: body.get._source[type], }; } - private async _writeToCluster(...args: Parameters) { - try { - return await this._callCluster(...args); - } catch (err) { - throw decorateEsError(err); - } - } - - private async _callCluster(...args: Parameters) { - try { - return await this._unwrappedCallCluster(...args); - } catch (err) { - throw decorateEsError(err); - } - } - /** * Returns index specified by the given type or the default index * @@ -1408,19 +1423,23 @@ export class SavedObjectsRepository { throw new Error(`Cannot make preflight get request for non-multi-namespace type '${type}'.`); } - const response = await this._callCluster('get', { - id: this._serializer.generateRawId(undefined, type, id), - index: this.getIndexForType(type), - ignore: [404], - }); + const { body, statusCode } = await this.client.get>( + { + id: this._serializer.generateRawId(undefined, type, id), + index: this.getIndexForType(type), + }, + { + ignore: [404], + } + ); - const indexFound = response.status !== 404; - const docFound = indexFound && response.found === true; + const indexFound = statusCode !== 404; + const docFound = indexFound && body.found === true; if (docFound) { - if (!this.rawDocExistsInNamespace(response, namespace)) { + if (!this.rawDocExistsInNamespace(body, namespace)) { throw SavedObjectsErrorHelpers.createConflictError(type, id); } - return getSavedObjectNamespaces(namespace, response); + return getSavedObjectNamespaces(namespace, body); } return getSavedObjectNamespaces(namespace); } @@ -1441,18 +1460,20 @@ export class SavedObjectsRepository { } const rawId = this._serializer.generateRawId(undefined, type, id); - const response = await this._callCluster('get', { - id: rawId, - index: this.getIndexForType(type), - ignore: [404], - 
}); + const { body, statusCode } = await this.client.get>( + { + id: rawId, + index: this.getIndexForType(type), + }, + { ignore: [404] } + ); - const indexFound = response.status !== 404; - const docFound = indexFound && response.found === true; - if (!docFound || !this.rawDocExistsInNamespace(response, namespace)) { + const indexFound = statusCode !== 404; + const docFound = indexFound && body.found === true; + if (!docFound || !this.rawDocExistsInNamespace(body, namespace)) { throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); } - return response as SavedObjectsRawDoc; + return body as SavedObjectsRawDoc; } } diff --git a/src/core/server/saved_objects/service/lib/repository_es_client.test.mock.ts b/src/core/server/saved_objects/service/lib/repository_es_client.test.mock.ts new file mode 100644 index 0000000000000..3dcf82dae5e46 --- /dev/null +++ b/src/core/server/saved_objects/service/lib/repository_es_client.test.mock.ts @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +export const retryCallClusterMock = jest.fn((fn) => fn()); +jest.doMock('../../../elasticsearch/client/retry_call_cluster', () => ({ + retryCallCluster: retryCallClusterMock, +})); diff --git a/src/core/server/saved_objects/service/lib/repository_es_client.test.ts b/src/core/server/saved_objects/service/lib/repository_es_client.test.ts new file mode 100644 index 0000000000000..86a984fb67124 --- /dev/null +++ b/src/core/server/saved_objects/service/lib/repository_es_client.test.ts @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { retryCallClusterMock } from './repository_es_client.test.mock'; + +import { createRepositoryEsClient, RepositoryEsClient } from './repository_es_client'; +import { elasticsearchClientMock } from '../../../elasticsearch/client/mocks'; +import { SavedObjectsErrorHelpers } from './errors'; + +describe('RepositoryEsClient', () => { + let client: ReturnType; + let repositoryClient: RepositoryEsClient; + + beforeEach(() => { + client = elasticsearchClientMock.createElasticSearchClient(); + repositoryClient = createRepositoryEsClient(client); + retryCallClusterMock.mockClear(); + }); + + it('delegates call to ES client method', async () => { + expect(repositoryClient.bulk).toStrictEqual(expect.any(Function)); + await repositoryClient.bulk({ body: [] }); + expect(client.bulk).toHaveBeenCalledTimes(1); + }); + + it('wraps a method call in retryCallCluster', async () => { + await repositoryClient.bulk({ body: [] }); + expect(retryCallClusterMock).toHaveBeenCalledTimes(1); + }); + + it('sets maxRetries: 0 to delegate retry logic to retryCallCluster', async () => { + expect(repositoryClient.bulk).toStrictEqual(expect.any(Function)); + await repositoryClient.bulk({ body: [] }); + expect(client.bulk).toHaveBeenCalledWith( + expect.any(Object), + expect.objectContaining({ maxRetries: 0 }) + ); + }); + + it('transform elasticsearch errors into saved objects errors', async () => { + expect.assertions(1); + client.bulk = jest.fn().mockRejectedValue(new Error('reason')); + try { + await repositoryClient.bulk({ body: [] }); + } catch (e) { + expect(SavedObjectsErrorHelpers.isSavedObjectsClientError(e)).toBe(true); + } + }); +}); diff --git a/src/core/server/saved_objects/service/lib/repository_es_client.ts b/src/core/server/saved_objects/service/lib/repository_es_client.ts new file mode 100644 index 0000000000000..0a759669b1af8 --- /dev/null +++ b/src/core/server/saved_objects/service/lib/repository_es_client.ts @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { TransportRequestOptions } from '@elastic/elasticsearch/lib/Transport'; + +import { ElasticsearchClient } from '../../../elasticsearch/'; +import { retryCallCluster } from '../../../elasticsearch/client/retry_call_cluster'; +import { decorateEsError } from './decorate_es_error'; + +const methods = [ + 'bulk', + 'create', + 'delete', + 'get', + 'index', + 'mget', + 'search', + 'update', + 'updateByQuery', +] as const; + +type MethodName = typeof methods[number]; + +export type RepositoryEsClient = Pick; + +export function createRepositoryEsClient(client: ElasticsearchClient): RepositoryEsClient { + return methods.reduce((acc: RepositoryEsClient, key: MethodName) => { + Object.defineProperty(acc, key, { + value: async (params?: unknown, options?: TransportRequestOptions) => { + try { + return await retryCallCluster(() => + (client[key] as Function)(params, { maxRetries: 0, ...options }) + ); + } catch (e) { + throw decorateEsError(e); + } + }, + }); + return acc; + }, {} as RepositoryEsClient); +} diff --git a/src/dev/build/args.test.ts b/src/dev/build/args.test.ts index 6a464eef209ec..bd118b8887c72 100644 --- a/src/dev/build/args.test.ts +++ b/src/dev/build/args.test.ts @@ -17,160 +17,158 @@ * under the License. */ -import { ToolingLog } from '@kbn/dev-utils'; +import { ToolingLog, createAnyInstanceSerializer } from '@kbn/dev-utils'; import { readCliArgs } from './args'; -const fn = (...subArgs: string[]) => { - const result = readCliArgs(['node', 'scripts/build', ...subArgs]); - (result as any).log = result.log instanceof ToolingLog ? '' : String(result.log); - return result; -}; +expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog)); it('renders help if `--help` passed', () => { - expect(fn('--help')).toMatchInlineSnapshot(` -Object { - "log": "undefined", - "showHelp": true, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--help'])).toMatchInlineSnapshot(` + Object { + "log": , + "showHelp": true, + "unknownFlags": Array [], + } + `); }); it('build default and oss dist for current platform, without packages, by default', () => { - expect(fn()).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": false, - "createDockerPackage": false, - "createDockerUbiPackage": false, - "createRpmPackage": false, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": false, - "versionQualifier": "", - }, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build'])).toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": false, + "createDockerPackage": false, + "createDockerUbiPackage": false, + "createRpmPackage": false, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": false, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); it('builds packages if --all-platforms is passed', () => { - expect(fn('--all-platforms')).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": true, - "createDockerPackage": true, - "createDockerUbiPackage": true, - "createRpmPackage": true, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": true, - "versionQualifier": "", - 
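
For reference, a usage sketch of the `createRepositoryEsClient` wrapper added above (the helper and its name are hypothetical; `esClient` stands in for whatever `ElasticsearchClient` the caller already has):

```ts
import { ElasticsearchClient } from '../../../elasticsearch/';
import { createRepositoryEsClient } from './repository_es_client';

export async function getRawDoc(esClient: ElasticsearchClient, index: string, id: string) {
  const repositoryClient = createRepositoryEsClient(esClient);

  // Call shape is identical to the raw client; retryCallCluster retries and
  // saved-objects error decoration are applied transparently by the wrapper.
  const { body, statusCode } = await repositoryClient.get({ index, id }, { ignore: [404] });
  return statusCode === 404 ? undefined : body;
}
```

Note that the wrapper forces `maxRetries: 0` on each call so that retry behaviour is owned entirely by `retryCallCluster`, as the new unit test above verifies.
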
}, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--all-platforms'])).toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": true, + "createDockerPackage": true, + "createDockerUbiPackage": true, + "createRpmPackage": true, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": true, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); it('limits packages if --rpm passed with --all-platforms', () => { - expect(fn('--all-platforms', '--rpm')).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": false, - "createDockerPackage": false, - "createDockerUbiPackage": false, - "createRpmPackage": true, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": true, - "versionQualifier": "", - }, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--rpm'])).toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": false, + "createDockerPackage": false, + "createDockerUbiPackage": false, + "createRpmPackage": true, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": true, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); it('limits packages if --deb passed with --all-platforms', () => { - expect(fn('--all-platforms', '--deb')).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": true, - "createDockerPackage": false, - "createDockerUbiPackage": false, - "createRpmPackage": false, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": true, - "versionQualifier": "", - }, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--deb'])).toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": true, + "createDockerPackage": false, + "createDockerUbiPackage": false, + "createRpmPackage": false, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": true, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); it('limits packages if --docker passed with --all-platforms', () => { - expect(fn('--all-platforms', '--docker')).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": false, - "createDockerPackage": true, - "createDockerUbiPackage": true, - "createRpmPackage": false, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": true, - "versionQualifier": "", - }, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker'])) + .toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": false, + 
"createDockerPackage": true, + "createDockerUbiPackage": true, + "createRpmPackage": false, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": true, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); it('limits packages if --docker passed with --skip-docker-ubi and --all-platforms', () => { - expect(fn('--all-platforms', '--docker', '--skip-docker-ubi')).toMatchInlineSnapshot(` -Object { - "buildArgs": Object { - "buildDefaultDist": true, - "buildOssDist": true, - "createArchives": true, - "createDebPackage": false, - "createDockerPackage": true, - "createDockerUbiPackage": false, - "createRpmPackage": false, - "downloadFreshNode": true, - "isRelease": false, - "targetAllPlatforms": true, - "versionQualifier": "", - }, - "log": "", - "showHelp": false, - "unknownFlags": Array [], -} -`); + expect(readCliArgs(['node', 'scripts/build', '--all-platforms', '--docker', '--skip-docker-ubi'])) + .toMatchInlineSnapshot(` + Object { + "buildOptions": Object { + "buildDefaultDist": true, + "buildOssDist": true, + "createArchives": true, + "createDebPackage": false, + "createDockerPackage": true, + "createDockerUbiPackage": false, + "createRpmPackage": false, + "downloadFreshNode": true, + "isRelease": false, + "targetAllPlatforms": true, + "versionQualifier": "", + }, + "log": , + "showHelp": false, + "unknownFlags": Array [], + } + `); }); diff --git a/src/dev/build/args.ts b/src/dev/build/args.ts index 1ff42d524c596..8e77024a7e8ae 100644 --- a/src/dev/build/args.ts +++ b/src/dev/build/args.ts @@ -20,16 +20,9 @@ import getopts from 'getopts'; import { ToolingLog, pickLevelFromFlags } from '@kbn/dev-utils'; -interface ParsedArgs { - showHelp: boolean; - unknownFlags: string[]; - log?: ToolingLog; - buildArgs?: { - [key: string]: any; - }; -} +import { BuildOptions } from './build_distributables'; -export function readCliArgs(argv: string[]): ParsedArgs { +export function readCliArgs(argv: string[]) { const unknownFlags: string[] = []; const flags = getopts(argv, { boolean: [ @@ -70,8 +63,16 @@ export function readCliArgs(argv: string[]): ParsedArgs { }, }); + const log = new ToolingLog({ + level: pickLevelFromFlags(flags, { + default: flags.debug === false ? 'info' : 'debug', + }), + writeTo: process.stdout, + }); + if (unknownFlags.length || flags.help) { return { + log, showHelp: true, unknownFlags, }; @@ -83,13 +84,6 @@ export function readCliArgs(argv: string[]): ParsedArgs { flags['all-platforms'] = true; } - const log = new ToolingLog({ - level: pickLevelFromFlags(flags, { - default: flags.debug === false ? 
'info' : 'debug', - }), - writeTo: process.stdout, - }); - function isOsPackageDesired(name: string) { if (flags['skip-os-packages'] || !flags['all-platforms']) { return false; @@ -103,22 +97,24 @@ export function readCliArgs(argv: string[]): ParsedArgs { return Boolean(flags[name]); } + const buildOptions: BuildOptions = { + isRelease: Boolean(flags.release), + versionQualifier: flags['version-qualifier'], + buildOssDist: flags.oss !== false, + buildDefaultDist: !flags.oss, + downloadFreshNode: !Boolean(flags['skip-node-download']), + createArchives: !Boolean(flags['skip-archives']), + createRpmPackage: isOsPackageDesired('rpm'), + createDebPackage: isOsPackageDesired('deb'), + createDockerPackage: isOsPackageDesired('docker'), + createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']), + targetAllPlatforms: Boolean(flags['all-platforms']), + }; + return { + log, showHelp: false, unknownFlags: [], - log, - buildArgs: { - isRelease: Boolean(flags.release), - versionQualifier: flags['version-qualifier'], - buildOssDist: flags.oss !== false, - buildDefaultDist: !flags.oss, - downloadFreshNode: !Boolean(flags['skip-node-download']), - createArchives: !Boolean(flags['skip-archives']), - createRpmPackage: isOsPackageDesired('rpm'), - createDebPackage: isOsPackageDesired('deb'), - createDockerPackage: isOsPackageDesired('docker'), - createDockerUbiPackage: isOsPackageDesired('docker') && !Boolean(flags['skip-docker-ubi']), - targetAllPlatforms: Boolean(flags['all-platforms']), - }, + buildOptions, }; } diff --git a/src/dev/build/build_distributables.js b/src/dev/build/build_distributables.js deleted file mode 100644 index 39a32fff891c2..0000000000000 --- a/src/dev/build/build_distributables.js +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
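
The args.test.ts snapshots above rely on `createAnyInstanceSerializer(ToolingLog)` so that the `ToolingLog` instance in the result is rendered as a stable placeholder rather than stringified. The body of `any_instance_serizlizer.ts` is not visible in this hunk, so the following is only a plausible reconstruction inferred from that usage; the real implementation may differ:

```ts
// Sketch only; the actual code lives in
// packages/kbn-dev-utils/src/serializers/any_instance_serizlizer.ts.
export function createAnyInstanceSerializer(Class: Function, name?: string) {
  return {
    test: (value: any) => value instanceof Class,
    serialize: () => `<${name ?? Class.name}>`,
  };
}
```
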
- */ - -import { getConfig, createRunner } from './lib'; - -import { - BuildKibanaPlatformPluginsTask, - BuildPackagesTask, - CleanEmptyFoldersTask, - CleanExtraBinScriptsTask, - CleanExtraFilesFromModulesTask, - CleanNodeBuildsTask, - CleanPackagesTask, - CleanTask, - CleanTypescriptTask, - CopyBinScriptsTask, - CopySourceTask, - CreateArchivesSourcesTask, - CreateArchivesTask, - CreateDebPackageTask, - CreateDockerPackageTask, - CreateDockerUbiPackageTask, - CreateEmptyDirsAndFilesTask, - CreateNoticeFileTask, - CreatePackageJsonTask, - CreateReadmeTask, - CreateRpmPackageTask, - DownloadNodeBuildsTask, - ExtractNodeBuildsTask, - InstallChromiumTask, - InstallDependenciesTask, - OptimizeBuildTask, - PatchNativeModulesTask, - PathLengthTask, - RemovePackageJsonDepsTask, - RemoveWorkspacesTask, - TranspileBabelTask, - TranspileScssTask, - UpdateLicenseFileTask, - UuidVerificationTask, - VerifyEnvTask, - VerifyExistingNodeBuildsTask, - WriteShaSumsTask, -} from './tasks'; - -export async function buildDistributables(options) { - const { - log, - isRelease, - buildOssDist, - buildDefaultDist, - downloadFreshNode, - createArchives, - createRpmPackage, - createDebPackage, - createDockerPackage, - createDockerUbiPackage, - versionQualifier, - targetAllPlatforms, - } = options; - - log.verbose('building distributables with options:', { - isRelease, - buildOssDist, - buildDefaultDist, - downloadFreshNode, - createArchives, - createRpmPackage, - createDebPackage, - versionQualifier, - }); - - const config = await getConfig({ - isRelease, - versionQualifier, - targetAllPlatforms, - }); - - const run = createRunner({ - config, - log, - buildOssDist, - buildDefaultDist, - }); - - /** - * verify, reset, and initialize the build environment - */ - await run(VerifyEnvTask); - await run(CleanTask); - await run(downloadFreshNode ? 
DownloadNodeBuildsTask : VerifyExistingNodeBuildsTask); - await run(ExtractNodeBuildsTask); - - /** - * run platform-generic build tasks - */ - await run(CopySourceTask); - await run(CopyBinScriptsTask); - await run(CreateEmptyDirsAndFilesTask); - await run(CreateReadmeTask); - await run(TranspileBabelTask); - await run(BuildPackagesTask); - await run(CreatePackageJsonTask); - await run(InstallDependenciesTask); - await run(RemoveWorkspacesTask); - await run(CleanPackagesTask); - await run(CreateNoticeFileTask); - await run(UpdateLicenseFileTask); - await run(RemovePackageJsonDepsTask); - await run(TranspileScssTask); - await run(BuildKibanaPlatformPluginsTask); - await run(OptimizeBuildTask); - await run(CleanTypescriptTask); - await run(CleanExtraFilesFromModulesTask); - await run(CleanEmptyFoldersTask); - - /** - * copy generic build outputs into platform-specific build - * directories and perform platform/architecture-specific steps - */ - await run(CreateArchivesSourcesTask); - await run(PatchNativeModulesTask); - await run(InstallChromiumTask); - await run(CleanExtraBinScriptsTask); - await run(CleanNodeBuildsTask); - - await run(PathLengthTask); - await run(UuidVerificationTask); - - /** - * package platform-specific builds into archives - * or os-specific packages in the target directory - */ - if (createArchives) { - // control w/ --skip-archives - await run(CreateArchivesTask); - } - if (createDebPackage) { - // control w/ --deb or --skip-os-packages - await run(CreateDebPackageTask); - } - if (createRpmPackage) { - // control w/ --rpm or --skip-os-packages - await run(CreateRpmPackageTask); - } - if (createDockerPackage) { - // control w/ --docker or --skip-docker-ubi or --skip-os-packages - await run(CreateDockerPackageTask); - if (createDockerUbiPackage) { - await run(CreateDockerUbiPackageTask); - } - } - - /** - * finalize artifacts by writing sha1sums of each into the target directory - */ - await run(WriteShaSumsTask); -} diff --git a/src/dev/build/build_distributables.ts b/src/dev/build/build_distributables.ts new file mode 100644 index 0000000000000..bfcc98d6cd9a8 --- /dev/null +++ b/src/dev/build/build_distributables.ts @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { ToolingLog } from '@kbn/dev-utils'; + +import { Config, createRunner } from './lib'; +import * as Tasks from './tasks'; + +export interface BuildOptions { + isRelease: boolean; + buildOssDist: boolean; + buildDefaultDist: boolean; + downloadFreshNode: boolean; + createArchives: boolean; + createRpmPackage: boolean; + createDebPackage: boolean; + createDockerPackage: boolean; + createDockerUbiPackage: boolean; + versionQualifier: string | undefined; + targetAllPlatforms: boolean; +} + +export async function buildDistributables(log: ToolingLog, options: BuildOptions) { + log.verbose('building distributables with options:', options); + + const config = await Config.create(options); + + const run = createRunner({ + config, + log, + buildDefaultDist: options.buildDefaultDist, + buildOssDist: options.buildOssDist, + }); + + /** + * verify, reset, and initialize the build environment + */ + await run(Tasks.VerifyEnv); + await run(Tasks.Clean); + await run(options.downloadFreshNode ? Tasks.DownloadNodeBuilds : Tasks.VerifyExistingNodeBuilds); + await run(Tasks.ExtractNodeBuilds); + + /** + * run platform-generic build tasks + */ + await run(Tasks.CopySource); + await run(Tasks.CopyBinScripts); + await run(Tasks.CreateEmptyDirsAndFiles); + await run(Tasks.CreateReadme); + await run(Tasks.TranspileBabel); + await run(Tasks.BuildPackages); + await run(Tasks.CreatePackageJson); + await run(Tasks.InstallDependencies); + await run(Tasks.RemoveWorkspaces); + await run(Tasks.CleanPackages); + await run(Tasks.CreateNoticeFile); + await run(Tasks.UpdateLicenseFile); + await run(Tasks.RemovePackageJsonDeps); + await run(Tasks.TranspileScss); + await run(Tasks.BuildKibanaPlatformPlugins); + await run(Tasks.OptimizeBuild); + await run(Tasks.CleanTypescript); + await run(Tasks.CleanExtraFilesFromModules); + await run(Tasks.CleanEmptyFolders); + + /** + * copy generic build outputs into platform-specific build + * directories and perform platform/architecture-specific steps + */ + await run(Tasks.CreateArchivesSources); + await run(Tasks.PatchNativeModules); + await run(Tasks.InstallChromium); + await run(Tasks.CleanExtraBinScripts); + await run(Tasks.CleanNodeBuilds); + + await run(Tasks.PathLength); + await run(Tasks.UuidVerification); + + /** + * package platform-specific builds into archives + * or os-specific packages in the target directory + */ + if (options.createArchives) { + // control w/ --skip-archives + await run(Tasks.CreateArchives); + } + if (options.createDebPackage) { + // control w/ --deb or --skip-os-packages + await run(Tasks.CreateDebPackage); + } + if (options.createRpmPackage) { + // control w/ --rpm or --skip-os-packages + await run(Tasks.CreateRpmPackage); + } + if (options.createDockerPackage) { + // control w/ --docker or --skip-docker-ubi or --skip-os-packages + await run(Tasks.CreateDockerPackage); + if (options.createDockerUbiPackage) { + await run(Tasks.CreateDockerUbiPackage); + } + } + + /** + * finalize artifacts by writing sha1sums of each into the target directory + */ + await run(Tasks.WriteShaSums); +} diff --git a/src/dev/build/cli.js b/src/dev/build/cli.ts similarity index 91% rename from src/dev/build/cli.js rename to src/dev/build/cli.ts index 9d23f92a3bafd..5811fc42d2009 100644 --- a/src/dev/build/cli.js +++ b/src/dev/build/cli.ts @@ -29,15 +29,15 @@ import { readCliArgs } from './args'; // ensure the cwd() is always the repo root process.chdir(resolve(__dirname, '../../../')); -const { showHelp, unknownFlags, log, buildArgs } = 
readCliArgs(process.argv); +const { showHelp, unknownFlags, log, buildOptions } = readCliArgs(process.argv); if (unknownFlags.length) { const pluralized = unknownFlags.length > 1 ? 'flags' : 'flag'; - console.log(chalk`\n{red Unknown ${pluralized}: ${unknownFlags.join(', ')}}\n`); + log.error(`Unknown ${pluralized}: ${unknownFlags.join(', ')}}`); } if (showHelp) { - console.log( + log.write( dedent(chalk` {dim usage:} node scripts/build @@ -63,7 +63,7 @@ if (showHelp) { process.exit(1); } -buildDistributables({ log, ...buildArgs }).catch((error) => { +buildDistributables(log, buildOptions!).catch((error) => { if (!isErrorLogged(error)) { log.error('Uncaught error'); log.error(error); diff --git a/src/dev/build/lib/__tests__/fixtures/bin/world_executable b/src/dev/build/lib/__fixtures__/bin/world_executable similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/bin/world_executable rename to src/dev/build/lib/__fixtures__/bin/world_executable diff --git a/src/dev/build/lib/__tests__/fixtures/foo.txt.gz b/src/dev/build/lib/__fixtures__/foo.txt.gz similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/foo.txt.gz rename to src/dev/build/lib/__fixtures__/foo.txt.gz diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir.tar.gz b/src/dev/build/lib/__fixtures__/foo_dir.tar.gz similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/foo_dir.tar.gz rename to src/dev/build/lib/__fixtures__/foo_dir.tar.gz diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/.bar b/src/dev/build/lib/__fixtures__/foo_dir/.bar similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/foo_dir/.bar rename to src/dev/build/lib/__fixtures__/foo_dir/.bar diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/bar.txt b/src/dev/build/lib/__fixtures__/foo_dir/bar.txt similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/foo_dir/bar.txt rename to src/dev/build/lib/__fixtures__/foo_dir/bar.txt diff --git a/src/dev/build/lib/__tests__/fixtures/foo_dir/foo/foo.txt b/src/dev/build/lib/__fixtures__/foo_dir/foo/foo.txt similarity index 100% rename from src/dev/build/lib/__tests__/fixtures/foo_dir/foo/foo.txt rename to src/dev/build/lib/__fixtures__/foo_dir/foo/foo.txt diff --git a/src/legacy/utils/__tests__/fixtures/log_on_sigint.js b/src/dev/build/lib/__fixtures__/log_on_sigint.js similarity index 100% rename from src/legacy/utils/__tests__/fixtures/log_on_sigint.js rename to src/dev/build/lib/__fixtures__/log_on_sigint.js diff --git a/src/dev/build/lib/__tests__/build.js b/src/dev/build/lib/__tests__/build.js deleted file mode 100644 index af9479e73f3dc..0000000000000 --- a/src/dev/build/lib/__tests__/build.js +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
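
With the build CLI refactor above, `buildDistributables` now receives the log and a fully typed `BuildOptions` object instead of a loose options bag. Normally `cli.ts` assembles that object via `readCliArgs`, as shown; a hedged sketch of invoking it programmatically (paths and values are illustrative):

```ts
import { ToolingLog } from '@kbn/dev-utils';
import { buildDistributables, BuildOptions } from './build_distributables';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });

// All flags are explicit now, so a caller cannot omit one without a type error.
const options: BuildOptions = {
  isRelease: false,
  versionQualifier: undefined,
  buildOssDist: true,
  buildDefaultDist: true,
  downloadFreshNode: true,
  createArchives: true,
  createRpmPackage: false,
  createDebPackage: false,
  createDockerPackage: false,
  createDockerUbiPackage: false,
  targetAllPlatforms: false,
};

buildDistributables(log, options).catch((error) => {
  log.error(error);
  process.exit(1);
});
```
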
- */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -import { createBuild } from '../build'; - -describe('dev/build/lib/build', () => { - describe('Build instance', () => { - describe('#isOss()', () => { - it('returns true if passed oss: true', () => { - const build = createBuild({ - oss: true, - }); - - expect(build.isOss()).to.be(true); - }); - - it('returns false if passed oss: false', () => { - const build = createBuild({ - oss: false, - }); - - expect(build.isOss()).to.be(false); - }); - }); - - describe('#getName()', () => { - it('returns kibana when oss: false', () => { - const build = createBuild({ - oss: false, - }); - - expect(build.getName()).to.be('kibana'); - }); - it('returns kibana-oss when oss: true', () => { - const build = createBuild({ - oss: true, - }); - - expect(build.getName()).to.be('kibana-oss'); - }); - }); - - describe('#getLogTag()', () => { - it('returns string with build name in it', () => { - const build = createBuild({}); - - expect(build.getLogTag()).to.contain(build.getName()); - }); - }); - - describe('#resolvePath()', () => { - it('uses passed config to resolve a path relative to the build', () => { - const resolveFromRepo = sinon.stub(); - const build = createBuild({ - config: { resolveFromRepo }, - }); - - build.resolvePath('bar'); - sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar'); - }); - - it('passes all arguments to config.resolveFromRepo()', () => { - const resolveFromRepo = sinon.stub(); - const build = createBuild({ - config: { resolveFromRepo }, - }); - - build.resolvePath('bar', 'baz', 'box'); - sinon.assert.calledWithExactly(resolveFromRepo, 'build', 'kibana', 'bar', 'baz', 'box'); - }); - }); - - describe('#resolvePathForPlatform()', () => { - it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => { - const resolveFromRepo = sinon.stub(); - const getBuildVersion = sinon.stub().returns('buildVersion'); - const build = createBuild({ - oss: true, - config: { resolveFromRepo, getBuildVersion }, - }); - - const getBuildName = sinon.stub().returns('platformName'); - const platform = { - getBuildName, - }; - - build.resolvePathForPlatform(platform, 'foo', 'bar'); - sinon.assert.calledWithExactly(getBuildName); - sinon.assert.calledWithExactly(getBuildVersion); - sinon.assert.calledWithExactly( - resolveFromRepo, - 'build', - 'oss', - `kibana-buildVersion-platformName`, - 'foo', - 'bar' - ); - }); - }); - - describe('#getPlatformArchivePath()', () => { - const sandbox = sinon.createSandbox(); - - const config = { - resolveFromRepo: sandbox.stub(), - getBuildVersion: sandbox.stub().returns('buildVersion'), - }; - - const build = createBuild({ - oss: false, - config, - }); - - const platform = { - getBuildName: sandbox.stub().returns('platformName'), - isWindows: sandbox.stub().returns(false), - }; - - beforeEach(() => { - sandbox.resetHistory(); - }); - - it('uses config.resolveFromRepo(), config.getBuildVersion, and platform.getBuildName() to create path', () => { - build.getPlatformArchivePath(platform); - sinon.assert.calledWithExactly(platform.getBuildName); - sinon.assert.calledWithExactly(platform.isWindows); - sinon.assert.calledWithExactly(config.getBuildVersion); - sinon.assert.calledWithExactly( - config.resolveFromRepo, - 'target', - `kibana-buildVersion-platformName.tar.gz` - ); - }); - - it('creates .zip path if platform is windows', () => { - platform.isWindows.returns(true); - build.getPlatformArchivePath(platform); - 
sinon.assert.calledWithExactly(platform.getBuildName); - sinon.assert.calledWithExactly(platform.isWindows); - sinon.assert.calledWithExactly(config.getBuildVersion); - sinon.assert.calledWithExactly( - config.resolveFromRepo, - 'target', - `kibana-buildVersion-platformName.zip` - ); - }); - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/config.js b/src/dev/build/lib/__tests__/config.js deleted file mode 100644 index 9544fc84dc6ff..0000000000000 --- a/src/dev/build/lib/__tests__/config.js +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { resolve } from 'path'; - -import expect from '@kbn/expect'; - -import pkg from '../../../../../package.json'; -import { getConfig } from '../config'; -import { getVersionInfo } from '../version_info'; - -describe('dev/build/lib/config', () => { - const setup = async function ({ targetAllPlatforms = true } = {}) { - const isRelease = Boolean(Math.round(Math.random())); - const config = await getConfig({ - isRelease, - targetAllPlatforms, - }); - const buildInfo = await getVersionInfo({ - isRelease, - pkg, - }); - return { config, buildInfo }; - }; - - describe('#getKibanaPkg()', () => { - it('returns the parsed package.json from the Kibana repo', async () => { - const { config } = await setup(); - expect(config.getKibanaPkg()).to.eql(pkg); - }); - }); - - describe('#getNodeVersion()', () => { - it('returns the node version from the kibana package.json', async () => { - const { config } = await setup(); - expect(config.getNodeVersion()).to.eql(pkg.engines.node); - }); - }); - - describe('#getRepoRelativePath()', () => { - it('converts an absolute path to relative path, from the root of the repo', async () => { - const { config } = await setup(); - expect(config.getRepoRelativePath(__dirname)).to.match(/^src[\/\\]dev[\/\\]build/); - }); - }); - - describe('#resolveFromRepo()', () => { - it('resolves a relative path', async () => { - const { config } = await setup(); - expect(config.resolveFromRepo('src/dev/build/lib/__tests__')).to.be(__dirname); - }); - - it('resolves a series of relative paths', async () => { - const { config } = await setup(); - expect(config.resolveFromRepo('src', 'dev', 'build', 'lib', '__tests__')).to.be(__dirname); - }); - }); - - describe('#getPlatform()', () => { - it('throws error when platform does not exist', async () => { - const { config } = await setup(); - const fn = () => config.getPlatform('foo', 'x64'); - - expect(fn).to.throwException(/Unable to find platform/); - }); - - it('throws error when architecture does not exist', async () => { - const { config } = await setup(); - const fn = () => config.getPlatform('linux', 'foo'); - - expect(fn).to.throwException(/Unable to find platform/); - }); - }); - - 
describe('#getTargetPlatforms()', () => { - it('returns an array of all platform objects', async () => { - const { config } = await setup(); - expect( - config - .getTargetPlatforms() - .map((p) => p.getNodeArch()) - .sort() - ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']); - }); - - it('returns just this platform when targetAllPlatforms = false', async () => { - const { config } = await setup({ targetAllPlatforms: false }); - const platforms = config.getTargetPlatforms(); - - expect(platforms).to.be.an('array'); - expect(platforms).to.have.length(1); - expect(platforms[0]).to.be(config.getPlatformForThisOs()); - }); - }); - - describe('#getNodePlatforms()', () => { - it('returns all platforms', async () => { - const { config } = await setup(); - expect( - config - .getTargetPlatforms() - .map((p) => p.getNodeArch()) - .sort() - ).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']); - }); - - it('returns this platform and linux, when targetAllPlatforms = false', async () => { - const { config } = await setup({ targetAllPlatforms: false }); - const platforms = config.getNodePlatforms(); - expect(platforms).to.be.an('array'); - if (process.platform !== 'linux') { - expect(platforms).to.have.length(2); - expect(platforms[0]).to.be(config.getPlatformForThisOs()); - expect(platforms[1]).to.be(config.getPlatform('linux', 'x64')); - } else { - expect(platforms).to.have.length(1); - expect(platforms[0]).to.be(config.getPlatform('linux', 'x64')); - } - }); - }); - - describe('#getPlatformForThisOs()', () => { - it('returns the platform that matches the arch of this machine', async () => { - const { config } = await setup(); - const currentPlatform = config.getPlatformForThisOs(); - expect(currentPlatform.getName()).to.be(process.platform); - expect(currentPlatform.getArchitecture()).to.be(process.arch); - }); - }); - - describe('#getBuildVersion()', () => { - it('returns the version from the build info', async () => { - const { config, buildInfo } = await setup(); - expect(config.getBuildVersion()).to.be(buildInfo.buildVersion); - }); - }); - - describe('#getBuildNumber()', () => { - it('returns the number from the build info', async () => { - const { config, buildInfo } = await setup(); - expect(config.getBuildNumber()).to.be(buildInfo.buildNumber); - }); - }); - - describe('#getBuildSha()', () => { - it('returns the sha from the build info', async () => { - const { config, buildInfo } = await setup(); - expect(config.getBuildSha()).to.be(buildInfo.buildSha); - }); - }); - - describe('#resolveFromTarget()', () => { - it('resolves a relative path, from the target directory', async () => { - const { config } = await setup(); - expect(config.resolveFromTarget()).to.be(resolve(__dirname, '../../../../../target')); - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/download.js b/src/dev/build/lib/__tests__/download.js deleted file mode 100644 index 49cb9caaaf4ec..0000000000000 --- a/src/dev/build/lib/__tests__/download.js +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { createServer } from 'http'; -import { join } from 'path'; -import { tmpdir } from 'os'; -import { mkdirp, readFileSync } from 'fs-extra'; - -import del from 'del'; -import sinon from 'sinon'; -import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test'; -import expect from '@kbn/expect'; -import Wreck from '@hapi/wreck'; - -import { ToolingLog } from '@kbn/dev-utils'; -import { download } from '../download'; - -const getTempFolder = async () => { - const dir = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir'); - console.log(dir); - await mkdirp(dir); - return dir; -}; - -describe('src/dev/build/tasks/nodejs/download', () => { - const sandbox = sinon.createSandbox(); - let TMP_DESTINATION; - let TMP_DIR; - - beforeEach(async () => { - TMP_DIR = await getTempFolder(); - TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__'); - }); - - afterEach(async () => { - await del(TMP_DIR, { force: true }); - }); - afterEach(() => sandbox.reset()); - - const onLogLine = sandbox.stub(); - const log = new ToolingLog({ - level: 'verbose', - writeTo: { - write: onLogLine, - }, - }); - - const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'; - const createSendHandler = (send) => (req, res) => { - res.statusCode = 200; - res.end(send); - }; - const sendErrorHandler = (req, res) => { - res.statusCode = 500; - res.end(); - }; - - let server; - let serverUrl; - let nextHandler; - afterEach(() => (nextHandler = null)); - - before(async () => { - server = createServer((req, res) => { - if (!nextHandler) { - nextHandler = sendErrorHandler; - } - - const handler = nextHandler; - nextHandler = null; - handler(req, res); - }); - - await Promise.race([ - new Promise((resolve, reject) => { - server.once('error', reject); - }), - new Promise((resolve) => { - server.listen(resolve); - }), - ]); - - serverUrl = `http://localhost:${server.address().port}/`; - }); - - after(async () => { - server.close(); - server = null; - }); - - it('downloads from URL and checks that content matches sha256', async () => { - nextHandler = createSendHandler('foo'); - await download({ - log, - url: serverUrl, - destination: TMP_DESTINATION, - sha256: FOO_SHA256, - }); - expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo'); - }); - - it('rejects and deletes destination if sha256 does not match', async () => { - nextHandler = createSendHandler('foo'); - - try { - await download({ - log, - url: serverUrl, - destination: TMP_DESTINATION, - sha256: 'bar', - }); - throw new Error('Expected download() to reject'); - } catch (error) { - expect(error) - .to.have.property('message') - .contain('does not match the expected sha256 checksum'); - } - - try { - readFileSync(TMP_DESTINATION); - throw new Error('Expected download to be deleted'); - } catch (error) { - expect(error).to.have.property('code', 'ENOENT'); - } - }); - - describe('reties download retries: number of times', () => { - it('resolves if retries = 1 and first attempt fails', async () => { - let reqCount = 0; - nextHandler = function sequenceHandler(req, res) { - switch (++reqCount) { - 
case 1: - nextHandler = sequenceHandler; - return sendErrorHandler(req, res); - default: - return createSendHandler('foo')(req, res); - } - }; - - await download({ - log, - url: serverUrl, - destination: TMP_DESTINATION, - sha256: FOO_SHA256, - retries: 2, - }); - - expect(readFileSync(TMP_DESTINATION, 'utf8')).to.be('foo'); - }); - - it('resolves if first fails, second is bad shasum, but third succeeds', async () => { - let reqCount = 0; - nextHandler = function sequenceHandler(req, res) { - switch (++reqCount) { - case 1: - nextHandler = sequenceHandler; - return sendErrorHandler(req, res); - case 2: - nextHandler = sequenceHandler; - return createSendHandler('bar')(req, res); - default: - return createSendHandler('foo')(req, res); - } - }; - - await download({ - log, - url: serverUrl, - destination: TMP_DESTINATION, - sha256: FOO_SHA256, - retries: 2, - }); - }); - - it('makes 6 requests if `retries: 5` and all failed', async () => { - let reqCount = 0; - nextHandler = function sequenceHandler(req, res) { - reqCount += 1; - nextHandler = sequenceHandler; - sendErrorHandler(req, res); - }; - - try { - await download({ - log, - url: serverUrl, - destination: TMP_DESTINATION, - sha256: FOO_SHA256, - retries: 5, - }); - throw new Error('Expected download() to reject'); - } catch (error) { - expect(error).to.have.property('message').contain('Request failed with status code 500'); - expect(reqCount).to.be(6); - } - }); - }); - - describe('sha256 option not supplied', () => { - before(() => { - sinon.stub(Wreck, 'request'); - }); - after(() => { - Wreck.request.restore(); - }); - - it('refuses to download', async () => { - try { - await download({ - log, - url: 'http://google.com', - destination: TMP_DESTINATION, - }); - - throw new Error('expected download() to reject'); - } catch (error) { - expect(error).to.have.property('message').contain('refusing to download'); - } - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/exec.js b/src/dev/build/lib/__tests__/exec.js deleted file mode 100644 index 8e122c65132ac..0000000000000 --- a/src/dev/build/lib/__tests__/exec.js +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import sinon from 'sinon'; -import stripAnsi from 'strip-ansi'; - -import { ToolingLog } from '@kbn/dev-utils'; -import { exec } from '../exec'; - -describe('dev/build/lib/exec', () => { - const sandbox = sinon.createSandbox(); - afterEach(() => sandbox.reset()); - - const onLogLine = sandbox.stub(); - const log = new ToolingLog({ - level: 'verbose', - writeTo: { - write: (chunk) => { - onLogLine(stripAnsi(chunk)); - }, - }, - }); - - it('executes a command, logs the command, and logs the output', async () => { - await exec(log, process.execPath, ['-e', 'console.log("hi")']); - - // logs the command before execution - sinon.assert.calledWithExactly(onLogLine, sinon.match(`$ ${process.execPath}`)); - - // log output of the process - sinon.assert.calledWithExactly(onLogLine, sinon.match(/debg\s+hi/)); - }); - - it('logs using level: option', async () => { - await exec(log, process.execPath, ['-e', 'console.log("hi")'], { - level: 'info', - }); - - // log output of the process - sinon.assert.calledWithExactly(onLogLine, sinon.match(/info\s+hi/)); - }); -}); diff --git a/src/dev/build/lib/__tests__/fs.js b/src/dev/build/lib/__tests__/fs.js deleted file mode 100644 index bf7596b012f79..0000000000000 --- a/src/dev/build/lib/__tests__/fs.js +++ /dev/null @@ -1,362 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { resolve } from 'path'; -import { chmodSync, statSync } from 'fs'; - -import del from 'del'; -import expect from '@kbn/expect'; - -import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs'; - -const TMP = resolve(__dirname, '__tmp__'); -const FIXTURES = resolve(__dirname, 'fixtures'); -const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz'); -const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz'); -const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt'); -const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable'); - -const isWindows = /^win/.test(process.platform); - -// get the mode of a file as a string, like 777, or 644, -function getCommonMode(path) { - return statSync(path).mode.toString(8).slice(-3); -} - -function assertNonAbsoluteError(error) { - expect(error).to.be.an(Error); - expect(error.message).to.contain('Please use absolute paths'); -} - -describe('dev/build/lib/fs', () => { - // ensure WORLD_EXECUTABLE is actually executable by all - before(async () => { - chmodSync(WORLD_EXECUTABLE, 0o777); - }); - - // clean and recreate TMP directory - beforeEach(async () => { - await del(TMP); - await mkdirp(TMP); - }); - - // cleanup TMP directory - after(async () => { - await del(TMP); - }); - - describe('mkdirp()', () => { - it('rejects if path is not absolute', async () => { - try { - await mkdirp('foo/bar'); - throw new Error('Expected mkdirp() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('makes directory and necessary parent directories', async () => { - const destination = resolve(TMP, 'a/b/c/d/e/f/g'); - - expect(await mkdirp(destination)).to.be(undefined); - - expect(statSync(destination).isDirectory()).to.be(true); - }); - }); - - describe('write()', () => { - it('rejects if path is not absolute', async () => { - try { - await write('foo/bar'); - throw new Error('Expected write() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('writes content to a file with existing parent directory', async () => { - const destination = resolve(TMP, 'a'); - - expect(await write(destination, 'bar')).to.be(undefined); - expect(await read(destination)).to.be('bar'); - }); - - it('writes content to a file with missing parents', async () => { - const destination = resolve(TMP, 'a/b/c/d/e'); - - expect(await write(destination, 'bar')).to.be(undefined); - expect(await read(destination)).to.be('bar'); - }); - }); - - describe('read()', () => { - it('rejects if path is not absolute', async () => { - try { - await read('foo/bar'); - throw new Error('Expected read() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('reads file, resolves with result', async () => { - expect(await read(BAR_TXT_PATH)).to.be('bar\n'); - }); - }); - - describe('getChildPaths()', () => { - it('rejects if path is not absolute', async () => { - try { - await getChildPaths('foo/bar'); - throw new Error('Expected getChildPaths() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('resolves with absolute paths to the children of directory', async () => { - const path = resolve(FIXTURES, 'foo_dir'); - expect((await getChildPaths(path)).sort()).to.eql([ - resolve(FIXTURES, 'foo_dir/.bar'), - BAR_TXT_PATH, - resolve(FIXTURES, 'foo_dir/foo'), - ]); - }); - - it('rejects with ENOENT if path does not exist', async () => { - try { - await getChildPaths(resolve(FIXTURES, 'notrealpath')); - throw new Error('Expected getChildPaths() to 
reject'); - } catch (error) { - expect(error).to.have.property('code', 'ENOENT'); - } - }); - }); - - describe('copyAll()', () => { - it('rejects if source path is not absolute', async () => { - try { - await copyAll('foo/bar', __dirname); - throw new Error('Expected copyAll() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if destination path is not absolute', async () => { - try { - await copyAll(__dirname, 'foo/bar'); - throw new Error('Expected copyAll() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if neither path is not absolute', async () => { - try { - await copyAll('foo/bar', 'foo/bar'); - throw new Error('Expected copyAll() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => { - const destination = resolve(TMP, 'a/b/c'); - await copyAll(FIXTURES, destination); - - expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).to.eql([ - resolve(destination, 'foo_dir/bar.txt'), - resolve(destination, 'foo_dir/foo'), - ]); - - expect(getCommonMode(resolve(destination, 'bin/world_executable'))).to.be( - isWindows ? '666' : '777' - ); - expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).to.be( - isWindows ? '666' : '644' - ); - }); - - it('applies select globs if specified, ignores dot files', async () => { - const destination = resolve(TMP, 'a/b/c/d'); - await copyAll(FIXTURES, destination, { - select: ['**/*bar*'], - }); - - try { - statSync(resolve(destination, 'bin/world_executable')); - throw new Error('expected bin/world_executable to not by copied'); - } catch (error) { - expect(error).to.have.property('code', 'ENOENT'); - } - - try { - statSync(resolve(destination, 'foo_dir/.bar')); - throw new Error('expected foo_dir/.bar to not by copied'); - } catch (error) { - expect(error).to.have.property('code', 'ENOENT'); - } - - expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n'); - }); - - it('supports select globs and dot option together', async () => { - const destination = resolve(TMP, 'a/b/c/d'); - await copyAll(FIXTURES, destination, { - select: ['**/*bar*'], - dot: true, - }); - - try { - statSync(resolve(destination, 'bin/world_executable')); - throw new Error('expected bin/world_executable to not by copied'); - } catch (error) { - expect(error).to.have.property('code', 'ENOENT'); - } - - expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n'); - expect(await read(resolve(destination, 'foo_dir/.bar'))).to.be('dotfile\n'); - }); - - it('supports atime and mtime', async () => { - const destination = resolve(TMP, 'a/b/c/d/e'); - const time = new Date(1425298511000); - await copyAll(FIXTURES, destination, { - time, - }); - const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt')); - const fooDir = statSync(resolve(destination, 'foo_dir')); - - // precision is platform specific - const oneDay = 86400000; - expect(Math.abs(barTxt.atimeMs - time.getTime())).to.be.below(oneDay); - expect(Math.abs(fooDir.atimeMs - time.getTime())).to.be.below(oneDay); - expect(Math.abs(barTxt.mtimeMs - time.getTime())).to.be.below(oneDay); - }); - }); - - describe('getFileHash()', () => { - it('rejects if path is not absolute', async () => { - try { - await getFileHash('foo/bar'); - throw new Error('Expected getFileHash() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - 
it('resolves with the sha1 hash of a file', async () => { - expect(await getFileHash(BAR_TXT_PATH, 'sha1')).to.be( - 'e242ed3bffccdf271b7fbaf34ed72d089537b42f' - ); - }); - it('resolves with the sha256 hash of a file', async () => { - expect(await getFileHash(BAR_TXT_PATH, 'sha256')).to.be( - '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730' - ); - }); - it('resolves with the md5 hash of a file', async () => { - expect(await getFileHash(BAR_TXT_PATH, 'md5')).to.be('c157a79031e1c40f85931829bc5fc552'); - }); - }); - - describe('untar()', () => { - it('rejects if source path is not absolute', async () => { - try { - await untar('foo/bar', '**/*', __dirname); - throw new Error('Expected untar() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if destination path is not absolute', async () => { - try { - await untar(__dirname, '**/*', 'foo/bar'); - throw new Error('Expected untar() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if neither path is not absolute', async () => { - try { - await untar('foo/bar', '**/*', 'foo/bar'); - throw new Error('Expected untar() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('extracts tarbar from source into destination, creating destination if necessary', async () => { - const destination = resolve(TMP, 'a/b/c/d/e/f'); - await untar(FOO_TAR_PATH, destination); - expect(await read(resolve(destination, 'foo_dir/bar.txt'))).to.be('bar\n'); - expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).to.be('foo\n'); - }); - - it('passed thrid argument to Extract class, overriding path with destination', async () => { - const destination = resolve(TMP, 'a/b/c'); - - await untar(FOO_TAR_PATH, destination, { - path: '/dev/null', - strip: 1, - }); - - expect(await read(resolve(destination, 'bar.txt'))).to.be('bar\n'); - expect(await read(resolve(destination, 'foo/foo.txt'))).to.be('foo\n'); - }); - }); - - describe('gunzip()', () => { - it('rejects if source path is not absolute', async () => { - try { - await gunzip('foo/bar', '**/*', __dirname); - throw new Error('Expected gunzip() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if destination path is not absolute', async () => { - try { - await gunzip(__dirname, '**/*', 'foo/bar'); - throw new Error('Expected gunzip() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('rejects if neither path is not absolute', async () => { - try { - await gunzip('foo/bar', '**/*', 'foo/bar'); - throw new Error('Expected gunzip() to reject'); - } catch (error) { - assertNonAbsoluteError(error); - } - }); - - it('extracts gzip from source into destination, creating destination if necessary', async () => { - const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt'); - await gunzip(FOO_GZIP_PATH, destination); - expect(await read(resolve(destination))).to.be('foo\n'); - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/platform.js b/src/dev/build/lib/__tests__/platform.js deleted file mode 100644 index a7bb5670ee412..0000000000000 --- a/src/dev/build/lib/__tests__/platform.js +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; - -import { createPlatform } from '../platform'; - -describe('src/dev/build/lib/platform', () => { - describe('getName()', () => { - it('returns the name argument', () => { - expect(createPlatform('foo').getName()).to.be('foo'); - }); - }); - - describe('getNodeArch()', () => { - it('returns the node arch for the passed name', () => { - expect(createPlatform('win32', 'x64').getNodeArch()).to.be('win32-x64'); - }); - }); - - describe('getBuildName()', () => { - it('returns the build name for the passed name', () => { - expect(createPlatform('linux', 'arm64', 'linux-aarch64').getBuildName()).to.be( - 'linux-aarch64' - ); - }); - }); - - describe('isWindows()', () => { - it('returns true if name is win32', () => { - expect(createPlatform('win32', 'x64').isWindows()).to.be(true); - expect(createPlatform('linux', 'x64').isWindows()).to.be(false); - expect(createPlatform('darwin', 'x64').isWindows()).to.be(false); - }); - }); - - describe('isLinux()', () => { - it('returns true if name is linux', () => { - expect(createPlatform('win32', 'x64').isLinux()).to.be(false); - expect(createPlatform('linux', 'x64').isLinux()).to.be(true); - expect(createPlatform('darwin', 'x64').isLinux()).to.be(false); - }); - }); - - describe('isMac()', () => { - it('returns true if name is darwin', () => { - expect(createPlatform('win32', 'x64').isMac()).to.be(false); - expect(createPlatform('linux', 'x64').isMac()).to.be(false); - expect(createPlatform('darwin', 'x64').isMac()).to.be(true); - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/runner.js b/src/dev/build/lib/__tests__/runner.js deleted file mode 100644 index 314c2dd45d50f..0000000000000 --- a/src/dev/build/lib/__tests__/runner.js +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import { ToolingLog } from '@kbn/dev-utils'; -import { createRunner } from '../runner'; -import { isErrorLogged, markErrorLogged } from '../errors'; - -describe('dev/build/lib/runner', () => { - const sandbox = sinon.createSandbox(); - - const config = {}; - - const onLogLine = sandbox.stub(); - const log = new ToolingLog({ - level: 'verbose', - writeTo: { - write: onLogLine, - }, - }); - - const buildMatcher = sinon.match({ - isOss: sinon.match.func, - resolvePath: sinon.match.func, - resolvePathForPlatform: sinon.match.func, - getPlatformArchivePath: sinon.match.func, - getName: sinon.match.func, - getLogTag: sinon.match.func, - }); - - const ossBuildMatcher = buildMatcher.and(sinon.match((b) => b.isOss(), 'is oss build')); - const defaultBuildMatcher = buildMatcher.and(sinon.match((b) => !b.isOss(), 'is not oss build')); - - afterEach(() => sandbox.reset()); - - describe('defaults', () => { - const run = createRunner({ - config, - log, - }); - - it('returns a promise', () => { - expect(run({ run: sinon.stub() })).to.be.a(Promise); - }); - - it('runs global task once, passing config and log', async () => { - const runTask = sinon.stub(); - await run({ global: true, run: runTask }); - sinon.assert.calledOnce(runTask); - sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array); - }); - - it('does not call local tasks', async () => { - const runTask = sinon.stub(); - await run({ run: runTask }); - sinon.assert.notCalled(runTask); - }); - }); - - describe('buildOssDist = true, buildDefaultDist = true', () => { - const run = createRunner({ - config, - log, - buildOssDist: true, - buildDefaultDist: true, - }); - - it('runs global task once, passing config and log', async () => { - const runTask = sinon.stub(); - await run({ global: true, run: runTask }); - sinon.assert.calledOnce(runTask); - sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array); - }); - - it('runs local tasks twice, passing config log and both builds', async () => { - const runTask = sinon.stub(); - await run({ run: runTask }); - sinon.assert.calledTwice(runTask); - sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher); - sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher); - }); - }); - - describe('just default dist', () => { - const run = createRunner({ - config, - log, - buildDefaultDist: true, - }); - - it('runs global task once, passing config and log', async () => { - const runTask = sinon.stub(); - await run({ global: true, run: runTask }); - sinon.assert.calledOnce(runTask); - sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array); - }); - - it('runs local tasks once, passing config log and default build', async () => { - const runTask = sinon.stub(); - await run({ run: runTask }); - sinon.assert.calledOnce(runTask); - sinon.assert.calledWithExactly(runTask, config, log, defaultBuildMatcher); - }); - }); - - describe('just oss dist', () => { - const run = createRunner({ - config, - log, - buildOssDist: true, - }); - - it('runs global task once, passing config and log', async () => { - const runTask = sinon.stub(); - await run({ global: true, run: runTask }); - sinon.assert.calledOnce(runTask); - sinon.assert.calledWithExactly(runTask, config, log, sinon.match.array); - }); - - it('runs local tasks once, passing config log and oss build', async () => { - const runTask = sinon.stub(); - await run({ run: runTask }); - sinon.assert.calledOnce(runTask); - 
sinon.assert.calledWithExactly(runTask, config, log, ossBuildMatcher); - }); - }); - - describe('task rejects', () => { - const run = createRunner({ - config, - log, - buildOssDist: true, - }); - - it('rejects, logs error, and marks error logged', async () => { - try { - await run({ - async run() { - throw new Error('FOO'); - }, - }); - throw new Error('expected run() to reject'); - } catch (error) { - expect(error).to.have.property('message').be('FOO'); - sinon.assert.calledWith(onLogLine, sinon.match(/FOO/)); - expect(isErrorLogged(error)).to.be(true); - } - }); - - it('just rethrows errors that have already been logged', async () => { - try { - await run({ - async run() { - throw markErrorLogged(new Error('FOO')); - }, - }); - - throw new Error('expected run() to reject'); - } catch (error) { - expect(error).to.have.property('message').be('FOO'); - sinon.assert.neverCalledWith(onLogLine, sinon.match(/FOO/)); - expect(isErrorLogged(error)).to.be(true); - } - }); - }); -}); diff --git a/src/dev/build/lib/__tests__/version_info.js b/src/dev/build/lib/__tests__/version_info.js deleted file mode 100644 index a7329642e4f9a..0000000000000 --- a/src/dev/build/lib/__tests__/version_info.js +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; - -import pkg from '../../../../../package.json'; -import { getVersionInfo } from '../version_info'; - -describe('dev/build/lib/version_info', () => { - describe('isRelease = true', () => { - it('returns unchanged package.version, build sha, and build number', async () => { - const versionInfo = await getVersionInfo({ - isRelease: true, - pkg, - }); - - expect(versionInfo).to.have.property('buildVersion', pkg.version); - expect(versionInfo) - .to.have.property('buildSha') - .match(/^[0-9a-f]{40}$/); - expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000); - }); - }); - describe('isRelease = false', () => { - it('returns snapshot version, build sha, and build number', async () => { - const versionInfo = await getVersionInfo({ - isRelease: false, - pkg, - }); - - expect(versionInfo) - .to.have.property('buildVersion') - .contain(pkg.version) - .match(/-SNAPSHOT$/); - expect(versionInfo) - .to.have.property('buildSha') - .match(/^[0-9a-f]{40}$/); - expect(versionInfo).to.have.property('buildNumber').a('number').greaterThan(1000); - }); - }); - - describe('versionQualifier', () => { - it('appends a version qualifier', async () => { - const versionInfo = await getVersionInfo({ - isRelease: true, - versionQualifier: 'beta55', - pkg, - }); - expect(versionInfo) - .to.have.property('buildVersion') - .be(pkg.version + '-beta55'); - }); - }); -}); diff --git a/src/dev/build/lib/build.js b/src/dev/build/lib/build.js deleted file mode 100644 index fe5111ad1377a..0000000000000 --- a/src/dev/build/lib/build.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import chalk from 'chalk'; - -export function createBuild({ config, oss }) { - const name = oss ? 'kibana-oss' : 'kibana'; - const logTag = oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`; - - return new (class Build { - isOss() { - return !!oss; - } - - resolvePath(...args) { - return config.resolveFromRepo('build', name, ...args); - } - - resolvePathForPlatform(platform, ...args) { - return config.resolveFromRepo( - 'build', - oss ? 'oss' : 'default', - `kibana-${config.getBuildVersion()}-${platform.getBuildName()}`, - ...args - ); - } - - getPlatformArchivePath(platform) { - const ext = platform.isWindows() ? 'zip' : 'tar.gz'; - return config.resolveFromRepo( - 'target', - `${name}-${config.getBuildVersion()}-${platform.getBuildName()}.${ext}` - ); - } - - getName() { - return name; - } - - getLogTag() { - return logTag; - } - })(); -} diff --git a/src/dev/build/lib/build.test.ts b/src/dev/build/lib/build.test.ts new file mode 100644 index 0000000000000..9fdf21cee6567 --- /dev/null +++ b/src/dev/build/lib/build.test.ts @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils'; + +import { Config } from './config'; +import { Build } from './build'; + +expect.addSnapshotSerializer(createAbsolutePathSerializer()); + +const config = new Config( + true, + { + version: '8.0.0', + engines: { + node: '*', + }, + workspaces: { + packages: [], + }, + }, + '1.2.3', + REPO_ROOT, + { + buildNumber: 1234, + buildSha: 'abcd1234', + buildVersion: '8.0.0', + }, + true +); + +const linuxPlatform = config.getPlatform('linux', 'x64'); +const linuxArmPlatform = config.getPlatform('linux', 'arm64'); +const windowsPlatform = config.getPlatform('win32', 'x64'); + +beforeEach(() => { + jest.clearAllMocks(); +}); + +const ossBuild = new Build(config, true); +const defaultBuild = new Build(config, false); + +describe('#isOss()', () => { + it('returns true for oss', () => { + expect(ossBuild.isOss()).toBe(true); + }); + + it('returns false for default build', () => { + expect(defaultBuild.isOss()).toBe(false); + }); +}); + +describe('#getName()', () => { + it('returns kibana for default build', () => { + expect(defaultBuild.getName()).toBe('kibana'); + }); + + it('returns kibana-oss for oss', () => { + expect(ossBuild.getName()).toBe('kibana-oss'); + }); +}); + +describe('#getLogTag()', () => { + it('returns string with build name in it', () => { + expect(defaultBuild.getLogTag()).toContain(defaultBuild.getName()); + expect(ossBuild.getLogTag()).toContain(ossBuild.getName()); + }); +}); + +describe('#resolvePath()', () => { + it('uses passed config to resolve a path relative to the repo', () => { + expect(ossBuild.resolvePath('bar')).toMatchInlineSnapshot( + `/build/kibana-oss/bar` + ); + }); + + it('passes all arguments to config.resolveFromRepo()', () => { + expect(defaultBuild.resolvePath('bar', 'baz', 'box')).toMatchInlineSnapshot( + `/build/kibana/bar/baz/box` + ); + }); +}); + +describe('#resolvePathForPlatform()', () => { + it('uses config.resolveFromRepo(), config.getBuildVersion(), and platform.getBuildName() to create path', () => { + expect(ossBuild.resolvePathForPlatform(linuxPlatform, 'foo', 'bar')).toMatchInlineSnapshot( + `/build/oss/kibana-8.0.0-linux-x86_64/foo/bar` + ); + }); +}); + +describe('#getPlatformArchivePath()', () => { + it('creates correct path for different platforms', () => { + expect(ossBuild.getPlatformArchivePath(linuxPlatform)).toMatchInlineSnapshot( + `/target/kibana-oss-8.0.0-linux-x86_64.tar.gz` + ); + expect(ossBuild.getPlatformArchivePath(linuxArmPlatform)).toMatchInlineSnapshot( + `/target/kibana-oss-8.0.0-linux-aarch64.tar.gz` + ); + expect(ossBuild.getPlatformArchivePath(windowsPlatform)).toMatchInlineSnapshot( + `/target/kibana-oss-8.0.0-windows-x86_64.zip` + ); + }); +}); diff --git a/src/dev/build/lib/build.ts 
b/src/dev/build/lib/build.ts new file mode 100644 index 0000000000000..d0b03b4c5e4b2 --- /dev/null +++ b/src/dev/build/lib/build.ts @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import chalk from 'chalk'; + +import { Config } from './config'; +import { Platform } from './platform'; + +export class Build { + private name = this.oss ? 'kibana-oss' : 'kibana'; + private logTag = this.oss ? chalk`{magenta [kibana-oss]}` : chalk`{cyan [ kibana ]}`; + + constructor(private config: Config, private oss: boolean) {} + + isOss() { + return !!this.oss; + } + + resolvePath(...args: string[]) { + return this.config.resolveFromRepo('build', this.name, ...args); + } + + resolvePathForPlatform(platform: Platform, ...args: string[]) { + return this.config.resolveFromRepo( + 'build', + this.oss ? 'oss' : 'default', + `kibana-${this.config.getBuildVersion()}-${platform.getBuildName()}`, + ...args + ); + } + + getPlatformArchivePath(platform: Platform) { + const ext = platform.isWindows() ? 'zip' : 'tar.gz'; + return this.config.resolveFromRepo( + 'target', + `${this.name}-${this.config.getBuildVersion()}-${platform.getBuildName()}.${ext}` + ); + } + + getName() { + return this.name; + } + + getLogTag() { + return this.logTag; + } +} diff --git a/src/dev/build/lib/config.js b/src/dev/build/lib/config.js deleted file mode 100644 index 36621f1c2d4ac..0000000000000 --- a/src/dev/build/lib/config.js +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { dirname, resolve, relative } from 'path'; -import os from 'os'; - -import { getVersionInfo } from './version_info'; -import { createPlatform } from './platform'; - -export async function getConfig({ isRelease, targetAllPlatforms, versionQualifier }) { - const pkgPath = resolve(__dirname, '../../../../package.json'); - const pkg = require(pkgPath); // eslint-disable-line import/no-dynamic-require - const repoRoot = dirname(pkgPath); - const nodeVersion = pkg.engines.node; - - const platforms = [ - createPlatform('linux', 'x64', 'linux-x86_64'), - createPlatform('linux', 'arm64', 'linux-aarch64'), - createPlatform('darwin', 'x64', 'darwin-x86_64'), - createPlatform('win32', 'x64', 'windows-x86_64'), - ]; - - const versionInfo = await getVersionInfo({ - isRelease, - versionQualifier, - pkg, - }); - - return new (class Config { - /** - * Get Kibana's parsed package.json file - * @return {Object} - */ - getKibanaPkg() { - return pkg; - } - - isRelease() { - return isRelease; - } - - /** - * Get the node version required by Kibana - * @return {String} - */ - getNodeVersion() { - return nodeVersion; - } - - /** - * Convert an absolute path to a relative path, based from the repo - * @param {String} absolutePath - * @return {String} - */ - getRepoRelativePath(absolutePath) { - return relative(repoRoot, absolutePath); - } - - /** - * Resolve a set of relative paths based from the directory of the Kibana repo - * @param {...String} ...subPaths - * @return {String} - */ - resolveFromRepo(...subPaths) { - return resolve(repoRoot, ...subPaths); - } - - /** - * Return the list of Platforms we are targeting, if --this-platform flag is - * specified only the platform for this OS will be returned - * @return {Array} - */ - getTargetPlatforms() { - if (targetAllPlatforms) { - return platforms; - } - - return [this.getPlatformForThisOs()]; - } - - /** - * Return the list of Platforms we need/have node downloads for. We always - * include the linux platform even if we aren't targeting linux so we can - * reliably get the LICENSE file, which isn't included in the windows version - * @return {Array} - */ - getNodePlatforms() { - if (targetAllPlatforms) { - return platforms; - } - - if (process.platform === 'linux') { - return [this.getPlatform('linux', 'x64')]; - } - - return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')]; - } - - getPlatform(name, arch) { - const selected = platforms.find((p) => { - return name === p.getName() && arch === p.getArchitecture(); - }); - - if (!selected) { - throw new Error(`Unable to find platform (${name}) with architecture (${arch})`); - } - - return selected; - } - - /** - * Get the platform object representing the OS on this machine - * @return {Platform} - */ - getPlatformForThisOs() { - return this.getPlatform(os.platform(), os.arch()); - } - - /** - * Get the version to use for this build - * @return {String} - */ - getBuildVersion() { - return versionInfo.buildVersion; - } - - /** - * Get the build number of this build - * @return {Number} - */ - getBuildNumber() { - return versionInfo.buildNumber; - } - - /** - * Get the git sha for this build - * @return {String} - */ - getBuildSha() { - return versionInfo.buildSha; - } - - /** - * Resolve a set of paths based from the target directory for this build. 
- * @param {...String} ...subPaths - * @return {String} - */ - resolveFromTarget(...subPaths) { - return resolve(repoRoot, 'target', ...subPaths); - } - })(); -} diff --git a/src/dev/build/lib/config.test.ts b/src/dev/build/lib/config.test.ts new file mode 100644 index 0000000000000..0539adc840a6a --- /dev/null +++ b/src/dev/build/lib/config.test.ts @@ -0,0 +1,201 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { resolve } from 'path'; + +import { createAbsolutePathSerializer, REPO_ROOT } from '@kbn/dev-utils'; + +import pkg from '../../../../package.json'; +import { Config } from './config'; + +jest.mock('./version_info', () => ({ + getVersionInfo: () => ({ + buildSha: 'abc1234', + buildVersion: '8.0.0', + buildNumber: 1234, + }), +})); + +const versionInfo = jest.requireMock('./version_info').getVersionInfo(); + +expect.addSnapshotSerializer(createAbsolutePathSerializer()); + +const setup = async ({ targetAllPlatforms = true }: { targetAllPlatforms?: boolean } = {}) => { + return await Config.create({ + isRelease: true, + targetAllPlatforms, + }); +}; + +describe('#getKibanaPkg()', () => { + it('returns the parsed package.json from the Kibana repo', async () => { + const config = await setup(); + expect(config.getKibanaPkg()).toEqual(pkg); + }); +}); + +describe('#getNodeVersion()', () => { + it('returns the node version from the kibana package.json', async () => { + const config = await setup(); + expect(config.getNodeVersion()).toEqual(pkg.engines.node); + }); +}); + +describe('#getRepoRelativePath()', () => { + it('converts an absolute path to relative path, from the root of the repo', async () => { + const config = await setup(); + expect(config.getRepoRelativePath(__dirname)).toMatchInlineSnapshot(`"src/dev/build/lib"`); + }); +}); + +describe('#resolveFromRepo()', () => { + it('resolves a relative path', async () => { + const config = await setup(); + expect(config.resolveFromRepo('src/dev/build')).toMatchInlineSnapshot( + `/src/dev/build` + ); + }); + + it('resolves a series of relative paths', async () => { + const config = await setup(); + expect(config.resolveFromRepo('src', 'dev', 'build')).toMatchInlineSnapshot( + `/src/dev/build` + ); + }); +}); + +describe('#getPlatform()', () => { + it('throws error when platform does not exist', async () => { + const config = await setup(); + expect(() => { + config.getPlatform( + // @ts-expect-error invalid platform name + 'foo', + 'x64' + ); + }).toThrowErrorMatchingInlineSnapshot( + `"Unable to find platform (foo) with architecture (x64)"` + ); + }); + + it('throws error when architecture does not exist', async () => { + const config = await setup(); + expect(() => { + config.getPlatform( + 'linux', + // @ts-expect-error invalid platform arch + 'foo' + ); + 
}).toThrowErrorMatchingInlineSnapshot( + `"Unable to find platform (linux) with architecture (foo)"` + ); + }); +}); + +describe('#getTargetPlatforms()', () => { + it('returns an array of all platform objects', async () => { + const config = await setup(); + expect( + config + .getTargetPlatforms() + .map((p) => p.getNodeArch()) + .sort() + ).toMatchInlineSnapshot(` + Array [ + "darwin-x64", + "linux-arm64", + "linux-x64", + "win32-x64", + ] + `); + }); + + it('returns just this platform when targetAllPlatforms = false', async () => { + const config = await setup({ + targetAllPlatforms: false, + }); + + expect(config.getTargetPlatforms()).toEqual([config.getPlatformForThisOs()]); + }); +}); + +describe('#getNodePlatforms()', () => { + it('returns all platforms', async () => { + const config = await setup(); + expect( + config + .getTargetPlatforms() + .map((p) => p.getNodeArch()) + .sort() + ).toEqual(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']); + }); + + it('returns this platform and linux, when targetAllPlatforms = false', async () => { + const config = await setup({ + targetAllPlatforms: false, + }); + const platforms = config.getNodePlatforms(); + expect(platforms).toBeInstanceOf(Array); + if (process.platform !== 'linux') { + expect(platforms).toHaveLength(2); + expect(platforms[0]).toBe(config.getPlatformForThisOs()); + expect(platforms[1]).toBe(config.getPlatform('linux', 'x64')); + } else { + expect(platforms).toHaveLength(1); + expect(platforms[0]).toBe(config.getPlatform('linux', 'x64')); + } + }); +}); + +describe('#getPlatformForThisOs()', () => { + it('returns the platform that matches the arch of this machine', async () => { + const config = await setup(); + const currentPlatform = config.getPlatformForThisOs(); + expect(currentPlatform.getName()).toBe(process.platform); + expect(currentPlatform.getArchitecture()).toBe(process.arch); + }); +}); + +describe('#getBuildVersion()', () => { + it('returns the version from the build info', async () => { + const config = await setup(); + expect(config.getBuildVersion()).toBe(versionInfo.buildVersion); + }); +}); + +describe('#getBuildNumber()', () => { + it('returns the number from the build info', async () => { + const config = await setup(); + expect(config.getBuildNumber()).toBe(versionInfo.buildNumber); + }); +}); + +describe('#getBuildSha()', () => { + it('returns the sha from the build info', async () => { + const config = await setup(); + expect(config.getBuildSha()).toBe(versionInfo.buildSha); + }); +}); + +describe('#resolveFromTarget()', () => { + it('resolves a relative path, from the target directory', async () => { + const config = await setup(); + expect(config.resolveFromTarget()).toBe(resolve(REPO_ROOT, 'target')); + }); +}); diff --git a/src/dev/build/lib/config.ts b/src/dev/build/lib/config.ts new file mode 100644 index 0000000000000..338c89b1930d8 --- /dev/null +++ b/src/dev/build/lib/config.ts @@ -0,0 +1,173 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { dirname, resolve, relative } from 'path'; +import os from 'os'; +import loadJsonFile from 'load-json-file'; + +import { getVersionInfo, VersionInfo } from './version_info'; +import { PlatformName, PlatformArchitecture, ALL_PLATFORMS } from './platform'; + +interface Options { + isRelease: boolean; + targetAllPlatforms: boolean; + versionQualifier?: string; +} + +interface Package { + version: string; + engines: { node: string }; + workspaces: { + packages: string[]; + }; + [key: string]: unknown; +} + +export class Config { + static async create({ isRelease, targetAllPlatforms, versionQualifier }: Options) { + const pkgPath = resolve(__dirname, '../../../../package.json'); + const pkg: Package = loadJsonFile.sync(pkgPath); + + return new Config( + targetAllPlatforms, + pkg, + pkg.engines.node, + dirname(pkgPath), + await getVersionInfo({ + isRelease, + versionQualifier, + pkg, + }), + isRelease + ); + } + + constructor( + private readonly targetAllPlatforms: boolean, + private readonly pkg: Package, + private readonly nodeVersion: string, + private readonly repoRoot: string, + private readonly versionInfo: VersionInfo, + public readonly isRelease: boolean + ) {} + + /** + * Get Kibana's parsed package.json file + */ + getKibanaPkg() { + return this.pkg; + } + + /** + * Get the node version required by Kibana + */ + getNodeVersion() { + return this.nodeVersion; + } + + /** + * Convert an absolute path to a relative path, based from the repo + */ + getRepoRelativePath(absolutePath: string) { + return relative(this.repoRoot, absolutePath); + } + + /** + * Resolve a set of relative paths based from the directory of the Kibana repo + */ + resolveFromRepo(...subPaths: string[]) { + return resolve(this.repoRoot, ...subPaths); + } + + /** + * Return the list of Platforms we are targeting, if --this-platform flag is + * specified only the platform for this OS will be returned + */ + getTargetPlatforms() { + if (this.targetAllPlatforms) { + return ALL_PLATFORMS; + } + + return [this.getPlatformForThisOs()]; + } + + /** + * Return the list of Platforms we need/have node downloads for. 
We always + * include the linux platform even if we aren't targeting linux so we can + * reliably get the LICENSE file, which isn't included in the windows version + */ + getNodePlatforms() { + if (this.targetAllPlatforms) { + return ALL_PLATFORMS; + } + + if (process.platform === 'linux') { + return [this.getPlatform('linux', 'x64')]; + } + + return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')]; + } + + getPlatform(name: PlatformName, arch: PlatformArchitecture) { + const selected = ALL_PLATFORMS.find((p) => { + return name === p.getName() && arch === p.getArchitecture(); + }); + + if (!selected) { + throw new Error(`Unable to find platform (${name}) with architecture (${arch})`); + } + + return selected; + } + + /** + * Get the platform object representing the OS on this machine + */ + getPlatformForThisOs() { + return this.getPlatform(os.platform() as PlatformName, os.arch() as PlatformArchitecture); + } + + /** + * Get the version to use for this build + */ + getBuildVersion() { + return this.versionInfo.buildVersion; + } + + /** + * Get the build number of this build + */ + getBuildNumber() { + return this.versionInfo.buildNumber; + } + + /** + * Get the git sha for this build + */ + getBuildSha() { + return this.versionInfo.buildSha; + } + + /** + * Resolve a set of paths based from the target directory for this build. + */ + resolveFromTarget(...subPaths: string[]) { + return resolve(this.repoRoot, 'target', ...subPaths); + } +} diff --git a/src/dev/build/lib/download.js b/src/dev/build/lib/download.ts similarity index 81% rename from src/dev/build/lib/download.js rename to src/dev/build/lib/download.ts index fbd2d47ff7b06..7c1618b833b45 100644 --- a/src/dev/build/lib/download.js +++ b/src/dev/build/lib/download.ts @@ -23,10 +23,15 @@ import { dirname } from 'path'; import chalk from 'chalk'; import { createHash } from 'crypto'; import Axios from 'axios'; +import { ToolingLog } from '@kbn/dev-utils'; + +// https://github.com/axios/axios/tree/ffea03453f77a8176c51554d5f6c3c6829294649/lib/adapters +// @ts-expect-error untyped internal module used to prevent axios from using xhr adapter in tests +import AxiosHttpAdapter from 'axios/lib/adapters/http'; import { mkdirp } from './fs'; -function tryUnlink(path) { +function tryUnlink(path: string) { try { unlinkSync(path); } catch (error) { @@ -36,7 +41,14 @@ function tryUnlink(path) { } } -export async function download(options) { +interface DownloadOptions { + log: ToolingLog; + url: string; + destination: string; + sha256: string; + retries?: number; +} +export async function download(options: DownloadOptions): Promise<void> { const { log, url, destination, sha256, retries = 0 } = options; if (!sha256) { @@ -52,8 +64,9 @@ export async function download(options) { log.debug(`Attempting download of ${url}`, chalk.dim(sha256)); const response = await Axios.request({ - url: url, + url, responseType: 'stream', + adapter: AxiosHttpAdapter, }); if (response.status !== 200) { @@ -62,7 +75,7 @@ export async function download(options) { const hash = createHash('sha256'); await new Promise((resolve, reject) => { - response.data.on('data', (chunk) => { + response.data.on('data', (chunk: Buffer) => { hash.update(chunk); writeSync(fileHandle, chunk); }); diff --git a/src/dev/build/lib/__tests__/errors.js b/src/dev/build/lib/errors.test.ts similarity index 67% rename from src/dev/build/lib/__tests__/errors.js rename to src/dev/build/lib/errors.test.ts index dc23b3e372bc6..0bf96463555fe 100644 --- a/src/dev/build/lib/__tests__/errors.js +++ 
b/src/dev/build/lib/errors.test.ts @@ -17,28 +17,26 @@ * under the License. */ -import expect from '@kbn/expect'; - -import { isErrorLogged, markErrorLogged } from '../errors'; +import { isErrorLogged, markErrorLogged } from './errors'; describe('dev/build/lib/errors', () => { describe('isErrorLogged()/markErrorLogged()', () => { it('returns true if error has been passed to markErrorLogged()', () => { const error = new Error(); - expect(isErrorLogged(error)).to.be(false); + expect(isErrorLogged(error)).toBe(false); markErrorLogged(error); - expect(isErrorLogged(error)).to.be(true); + expect(isErrorLogged(error)).toBe(true); }); describe('isErrorLogged()', () => { it('handles any value type', () => { - expect(isErrorLogged(null)).to.be(false); - expect(isErrorLogged(undefined)).to.be(false); - expect(isErrorLogged(1)).to.be(false); - expect(isErrorLogged([])).to.be(false); - expect(isErrorLogged({})).to.be(false); - expect(isErrorLogged(/foo/)).to.be(false); - expect(isErrorLogged(new Date())).to.be(false); + expect(isErrorLogged(null)).toBe(false); + expect(isErrorLogged(undefined)).toBe(false); + expect(isErrorLogged(1)).toBe(false); + expect(isErrorLogged([])).toBe(false); + expect(isErrorLogged({})).toBe(false); + expect(isErrorLogged(/foo/)).toBe(false); + expect(isErrorLogged(new Date())).toBe(false); }); }); }); diff --git a/src/dev/build/lib/errors.js b/src/dev/build/lib/errors.ts similarity index 86% rename from src/dev/build/lib/errors.js rename to src/dev/build/lib/errors.ts index 7fb8e2dc070d1..8405e9d29a033 100644 --- a/src/dev/build/lib/errors.js +++ b/src/dev/build/lib/errors.ts @@ -17,13 +17,13 @@ * under the License. */ -const loggedErrors = new WeakSet(); +const loggedErrors = new WeakSet<any>(); -export function markErrorLogged(error) { +export function markErrorLogged<T = any>(error: T): T { loggedErrors.add(error); return error; } -export function isErrorLogged(error) { +export function isErrorLogged(error: any) { return loggedErrors.has(error); } diff --git a/src/dev/build/lib/exec.test.ts b/src/dev/build/lib/exec.test.ts new file mode 100644 index 0000000000000..6f6ec4f26afbb --- /dev/null +++ b/src/dev/build/lib/exec.test.ts @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +import Path from 'path'; + +import { + ToolingLog, + ToolingLogCollectingWriter, + createStripAnsiSerializer, + createRecursiveSerializer, +} from '@kbn/dev-utils'; + +import { exec } from './exec'; + +const testWriter = new ToolingLogCollectingWriter(); +const log = new ToolingLog(); +log.setWriters([testWriter]); + +expect.addSnapshotSerializer(createStripAnsiSerializer()); +expect.addSnapshotSerializer( + createRecursiveSerializer( + (v) => v.includes(process.execPath), + (v) => v.split(Path.dirname(process.execPath)).join('') + ) +); + +beforeEach(() => { + testWriter.messages.length = 0; +}); + +it('executes a command, logs the command, and logs the output', async () => { + await exec(log, process.execPath, ['-e', 'console.log("hi")']); + expect(testWriter.messages).toMatchInlineSnapshot(` + Array [ + " debg $ /node -e console.log(\\"hi\\")", + " debg hi", + ] + `); +}); + +it('logs using level: option', async () => { + await exec(log, process.execPath, ['-e', 'console.log("hi")'], { + level: 'info', + }); + expect(testWriter.messages).toMatchInlineSnapshot(` + Array [ + " info $ /node -e console.log(\\"hi\\")", + " info hi", + ] + `); +}); diff --git a/src/dev/build/lib/exec.js b/src/dev/build/lib/exec.ts similarity index 73% rename from src/dev/build/lib/exec.js rename to src/dev/build/lib/exec.ts index 5e47500c72c5c..c3870230b8f31 100644 --- a/src/dev/build/lib/exec.js +++ b/src/dev/build/lib/exec.ts @@ -19,12 +19,23 @@ import execa from 'execa'; import chalk from 'chalk'; +import { ToolingLog, LogLevel } from '@kbn/dev-utils'; -import { watchStdioForLine } from '../../../legacy/utils'; +import { watchStdioForLine } from './watch_stdio_for_line'; -export async function exec(log, cmd, args, options = {}) { - const { level = 'debug', cwd, env, exitAfter } = options; +interface Options { + level?: Exclude<LogLevel, 'silent' | 'error'>; + cwd?: string; + env?: Record<string, string>; + exitAfter?: RegExp; +} +export async function exec( + log: ToolingLog, + cmd: string, + args: string[], + { level = 'debug', cwd, env, exitAfter }: Options = {} +) { log[level](chalk.dim('$'), cmd, ...args); const proc = execa(cmd, args, { diff --git a/src/dev/build/lib/fs.js b/src/dev/build/lib/fs.ts similarity index 56% rename from src/dev/build/lib/fs.js rename to src/dev/build/lib/fs.ts index b905f40d0de1e..d86901c41e436 100644 --- a/src/dev/build/lib/fs.js +++ b/src/dev/build/lib/fs.ts @@ -17,28 +17,31 @@ * under the License.
*/ -import archiver from 'archiver'; import fs from 'fs'; import { createHash } from 'crypto'; +import { pipeline, Writable } from 'stream'; import { resolve, dirname, isAbsolute, sep } from 'path'; import { createGunzip } from 'zlib'; -import { inspect } from 'util'; +import { inspect, promisify } from 'util'; +import archiver from 'archiver'; import vfs from 'vinyl-fs'; -import { promisify } from 'bluebird'; +import File from 'vinyl'; import del from 'del'; import deleteEmpty from 'delete-empty'; -import { createPromiseFromStreams, createMapStream } from '../../../legacy/utils'; - -import tar from 'tar'; +import tar, { ExtractOptions } from 'tar'; +import { ToolingLog } from '@kbn/dev-utils'; +const pipelineAsync = promisify(pipeline); const mkdirAsync = promisify(fs.mkdir); const writeFileAsync = promisify(fs.writeFile); const readFileAsync = promisify(fs.readFile); const readdirAsync = promisify(fs.readdir); const utimesAsync = promisify(fs.utimes); +const copyFileAsync = promisify(fs.copyFile); +const statAsync = promisify(fs.stat); -export function assertAbsolute(path) { +export function assertAbsolute(path: string) { if (!isAbsolute(path)) { throw new TypeError( 'Please use absolute paths to keep things explicit. You probably want to use `build.resolvePath()` or `config.resolveFromRepo()`.' @@ -46,7 +49,7 @@ export function assertAbsolute(path) { } } -export function isFileAccessible(path) { +export function isFileAccessible(path: string) { assertAbsolute(path); try { @@ -57,35 +60,35 @@ export function isFileAccessible(path) { } } -function longInspect(value) { +function longInspect(value: any) { return inspect(value, { maxArrayLength: Infinity, }); } -export async function mkdirp(path) { +export async function mkdirp(path: string) { assertAbsolute(path); await mkdirAsync(path, { recursive: true }); } -export async function write(path, contents) { +export async function write(path: string, contents: string) { assertAbsolute(path); await mkdirp(dirname(path)); await writeFileAsync(path, contents); } -export async function read(path) { +export async function read(path: string) { assertAbsolute(path); return await readFileAsync(path, 'utf8'); } -export async function getChildPaths(path) { +export async function getChildPaths(path: string) { assertAbsolute(path); const childNames = await readdirAsync(path); return childNames.map((name) => resolve(path, name)); } -export async function deleteAll(patterns, log) { +export async function deleteAll(patterns: string[], log: ToolingLog) { if (!Array.isArray(patterns)) { throw new TypeError('Expected patterns to be an array'); } @@ -108,7 +111,11 @@ export async function deleteAll(patterns, log) { } } -export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) { +export async function deleteEmptyFolders( + log: ToolingLog, + rootFolderPath: string, + foldersToKeep: string[] +) { if (typeof rootFolderPath !== 'string') { throw new TypeError('Expected root folder to be a string path'); } @@ -121,7 +128,11 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) { // Delete empty is used to gather all the empty folders and // then we use del to actually delete them - const emptyFoldersList = await deleteEmpty(rootFolderPath, { dryRun: true }); + const emptyFoldersList = await deleteEmpty(rootFolderPath, { + // @ts-expect-error DT package has incorrect types https://github.com/jonschlinkert/delete-empty/blob/6ae34547663e6845c3c98b184c606fa90ef79c0a/index.js#L160 + dryRun: true, + }); + const foldersToDelete 
= emptyFoldersList.filter((folderToDelete) => { return !foldersToKeep.some((folderToKeep) => folderToDelete.includes(folderToKeep)); }); @@ -133,85 +144,153 @@ export async function deleteEmptyFolders(log, rootFolderPath, foldersToKeep) { log.verbose('Deleted:', longInspect(deletedEmptyFolders)); } -export async function copyAll(sourceDir, destination, options = {}) { - const { select = ['**/*'], dot = false, time } = options; +interface CopyOptions { + clone?: boolean; +} +export async function copy(source: string, destination: string, options: CopyOptions = {}) { + assertAbsolute(source); + assertAbsolute(destination); + + // ensure source exists before creating destination directory and copying source + await statAsync(source); + await mkdirp(dirname(destination)); + return await copyFileAsync( + source, + destination, + options.clone ? fs.constants.COPYFILE_FICLONE : 0 + ); +} + +interface CopyAllOptions { + select?: string[]; + dot?: boolean; + time?: string | number | Date; +} + +export async function copyAll( + sourceDir: string, + destination: string, + options: CopyAllOptions = {} +) { + const { select = ['**/*'], dot = false, time = Date.now() } = options; assertAbsolute(sourceDir); assertAbsolute(destination); - await createPromiseFromStreams([ + await pipelineAsync( vfs.src(select, { buffer: false, cwd: sourceDir, base: sourceDir, dot, }), - vfs.dest(destination), - ]); + vfs.dest(destination) + ); // we must update access and modified file times after the file copy // has completed, otherwise the copy action can effect modify times. if (Boolean(time)) { - await createPromiseFromStreams([ + await pipelineAsync( vfs.src(select, { buffer: false, cwd: destination, base: destination, dot, }), - createMapStream((file) => utimesAsync(file.path, time, time)), - ]); + new Writable({ + objectMode: true, + write(file: File, _, cb) { + utimesAsync(file.path, time, time).then(() => cb(), cb); + }, + }) + ); } } -export async function getFileHash(path, algo) { +export async function getFileHash(path: string, algo: string) { assertAbsolute(path); const hash = createHash(algo); const readStream = fs.createReadStream(path); - await new Promise((resolve, reject) => { + await new Promise((res, rej) => { readStream .on('data', (chunk) => hash.update(chunk)) - .on('error', reject) - .on('end', resolve); + .on('error', rej) + .on('end', res); }); return hash.digest('hex'); } -export async function untar(source, destination, extractOptions = {}) { +export async function untar( + source: string, + destination: string, + extractOptions: ExtractOptions = {} +) { assertAbsolute(source); assertAbsolute(destination); await mkdirAsync(destination, { recursive: true }); - await createPromiseFromStreams([ + await pipelineAsync( fs.createReadStream(source), createGunzip(), tar.extract({ ...extractOptions, cwd: destination, - }), - ]); + }) + ); } -export async function gunzip(source, destination) { +export async function gunzip(source: string, destination: string) { assertAbsolute(source); assertAbsolute(destination); await mkdirAsync(dirname(destination), { recursive: true }); - await createPromiseFromStreams([ + await pipelineAsync( fs.createReadStream(source), createGunzip(), - fs.createWriteStream(destination), - ]); + fs.createWriteStream(destination) + ); +} + +interface CompressTarOptions { + createRootDirectory: boolean; + source: string; + destination: string; + archiverOptions?: archiver.TarOptions & archiver.CoreOptions; } +export async function compressTar({ + source, + destination, + 
archiverOptions, + createRootDirectory, +}: CompressTarOptions) { + const output = fs.createWriteStream(destination); + const archive = archiver('tar', archiverOptions); + const name = createRootDirectory ? source.split(sep).slice(-1)[0] : false; + + archive.pipe(output); -export async function compress(type, options = {}, source, destination) { + return archive.directory(source, name).finalize(); +} + +interface CompressZipOptions { + createRootDirectory: boolean; + source: string; + destination: string; + archiverOptions?: archiver.ZipOptions & archiver.CoreOptions; +} +export async function compressZip({ + source, + destination, + archiverOptions, + createRootDirectory, +}: CompressZipOptions) { const output = fs.createWriteStream(destination); - const archive = archiver(type, options.archiverOptions); - const name = options.createRootDirectory ? source.split(sep).slice(-1)[0] : false; + const archive = archiver('zip', archiverOptions); + const name = createRootDirectory ? source.split(sep).slice(-1)[0] : false; archive.pipe(output); diff --git a/src/dev/build/lib/index.js b/src/dev/build/lib/index.js deleted file mode 100644 index 6540db6f37a72..0000000000000 --- a/src/dev/build/lib/index.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { getConfig } from './config'; -export { createRunner } from './runner'; -export { isErrorLogged } from './errors'; -export { exec } from './exec'; -export { - read, - write, - mkdirp, - copyAll, - getFileHash, - untar, - gunzip, - deleteAll, - deleteEmptyFolders, - compress, - isFileAccessible, -} from './fs'; -export { download } from './download'; -export { scanDelete } from './scan_delete'; -export { scanCopy } from './scan_copy'; diff --git a/src/dev/build/lib/index.ts b/src/dev/build/lib/index.ts new file mode 100644 index 0000000000000..339dc41cc6ccf --- /dev/null +++ b/src/dev/build/lib/index.ts @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export * from './config'; +export * from './build'; +export * from './runner'; +export * from './errors'; +export * from './exec'; +export * from './fs'; +export * from './download'; +export * from './scan_delete'; +export * from './scan_copy'; +export * from './platform'; +export * from './scan'; diff --git a/src/dev/build/lib/integration_tests/download.test.ts b/src/dev/build/lib/integration_tests/download.test.ts new file mode 100644 index 0000000000000..a86d5292501f5 --- /dev/null +++ b/src/dev/build/lib/integration_tests/download.test.ts @@ -0,0 +1,226 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { createServer, IncomingMessage, ServerResponse } from 'http'; +import { join } from 'path'; +import { tmpdir } from 'os'; +import { readFileSync } from 'fs'; + +import del from 'del'; +import { CI_PARALLEL_PROCESS_PREFIX } from '@kbn/test'; +import { ToolingLog } from '@kbn/dev-utils'; + +import { mkdirp } from '../fs'; +import { download } from '../download'; + +const TMP_DIR = join(tmpdir(), CI_PARALLEL_PROCESS_PREFIX, 'download-js-test-tmp-dir'); +const TMP_DESTINATION = join(TMP_DIR, '__tmp_download_js_test_file__'); + +beforeEach(async () => { + await del(TMP_DIR, { force: true }); + await mkdirp(TMP_DIR); + jest.clearAllMocks(); +}); + +afterEach(async () => { + await del(TMP_DIR, { force: true }); +}); + +const onLogLine = jest.fn(); +const log = new ToolingLog({ + level: 'verbose', + writeTo: { + write: onLogLine, + }, +}); + +type Handler = (req: IncomingMessage, res: ServerResponse) => void; + +const FOO_SHA256 = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'; +const createSendHandler = (send: any): Handler => (req, res) => { + res.statusCode = 200; + res.end(send); +}; +const sendErrorHandler: Handler = (req, res) => { + res.statusCode = 500; + res.end(); +}; + +let serverUrl: string; +let nextHandler: Handler | null = null; +const server = createServer((req, res) => { + if (!nextHandler) { + nextHandler = sendErrorHandler; + } + + const handler = nextHandler; + nextHandler = null; + handler(req, res); +}); + +afterEach(() => (nextHandler = null)); + +beforeAll(async () => { + await Promise.race([ + new Promise((_, reject) => { + server.once('error', reject); + }), + new Promise((resolve) => { + server.listen(resolve); + }), + ]); + + // address is only a string when listening to a UNIX socket, and undefined when we haven't called listen() yet + const address = server.address() as { port: number }; + + serverUrl = `http://localhost:${address.port}/`; +}); + +afterAll(async () => { + server.close(); +}); + +it('downloads from URL and checks that content matches sha256', async () => { + nextHandler = createSendHandler('foo'); + await download({ + log, + url: serverUrl, + destination: 
TMP_DESTINATION, + sha256: FOO_SHA256, + }); + expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo'); +}); + +it('rejects and deletes destination if sha256 does not match', async () => { + nextHandler = createSendHandler('foo'); + + try { + await download({ + log, + url: serverUrl, + destination: TMP_DESTINATION, + sha256: 'bar', + }); + throw new Error('Expected download() to reject'); + } catch (error) { + expect(error).toHaveProperty( + 'message', + expect.stringContaining('does not match the expected sha256 checksum') + ); + } + + try { + readFileSync(TMP_DESTINATION); + throw new Error('Expected download to be deleted'); + } catch (error) { + expect(error).toHaveProperty('code', 'ENOENT'); + } +}); + +describe('retries download retries: number of times', () => { + it('resolves if retries = 1 and first attempt fails', async () => { + let reqCount = 0; + nextHandler = function sequenceHandler(req, res) { + switch (++reqCount) { + case 1: + nextHandler = sequenceHandler; + return sendErrorHandler(req, res); + default: + return createSendHandler('foo')(req, res); + } + }; + + await download({ + log, + url: serverUrl, + destination: TMP_DESTINATION, + sha256: FOO_SHA256, + retries: 2, + }); + + expect(readFileSync(TMP_DESTINATION, 'utf8')).toBe('foo'); + }); + + it('resolves if first fails, second is bad shasum, but third succeeds', async () => { + let reqCount = 0; + nextHandler = function sequenceHandler(req, res) { + switch (++reqCount) { + case 1: + nextHandler = sequenceHandler; + return sendErrorHandler(req, res); + case 2: + nextHandler = sequenceHandler; + return createSendHandler('bar')(req, res); + default: + return createSendHandler('foo')(req, res); + } + }; + + await download({ + log, + url: serverUrl, + destination: TMP_DESTINATION, + sha256: FOO_SHA256, + retries: 2, + }); + }); + + it('makes 6 requests if `retries: 5` and all failed', async () => { + let reqCount = 0; + nextHandler = function sequenceHandler(req, res) { + reqCount += 1; + nextHandler = sequenceHandler; + sendErrorHandler(req, res); + }; + + try { + await download({ + log, + url: serverUrl, + destination: TMP_DESTINATION, + sha256: FOO_SHA256, + retries: 5, + }); + throw new Error('Expected download() to reject'); + } catch (error) { + expect(error).toHaveProperty( + 'message', + expect.stringContaining('Request failed with status code 500') + ); + expect(reqCount).toBe(6); + } + }); +}); + +describe('sha256 option not supplied', () => { + it('refuses to download', async () => { + try { + // @ts-expect-error missing sha256 param is intentional + await download({ + log, + url: 'http://google.com', + destination: TMP_DESTINATION, + }); + + throw new Error('expected download() to reject'); + } catch (error) { + expect(error).toHaveProperty('message', expect.stringContaining('refusing to download')); + } + }); +}); diff --git a/src/dev/build/lib/integration_tests/fs.test.ts b/src/dev/build/lib/integration_tests/fs.test.ts new file mode 100644 index 0000000000000..e9ce09554159b --- /dev/null +++ b/src/dev/build/lib/integration_tests/fs.test.ts @@ -0,0 +1,358 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { resolve } from 'path'; +import { chmodSync, statSync } from 'fs'; + +import del from 'del'; + +import { mkdirp, write, read, getChildPaths, copyAll, getFileHash, untar, gunzip } from '../fs'; + +const TMP = resolve(__dirname, '../__tmp__'); +const FIXTURES = resolve(__dirname, '../__fixtures__'); +const FOO_TAR_PATH = resolve(FIXTURES, 'foo_dir.tar.gz'); +const FOO_GZIP_PATH = resolve(FIXTURES, 'foo.txt.gz'); +const BAR_TXT_PATH = resolve(FIXTURES, 'foo_dir/bar.txt'); +const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable'); + +const isWindows = /^win/.test(process.platform); + +// get the mode of a file as a string, like 777, or 644, +function getCommonMode(path: string) { + return statSync(path).mode.toString(8).slice(-3); +} + +function assertNonAbsoluteError(error: any) { + expect(error).toBeInstanceOf(Error); + expect(error.message).toContain('Please use absolute paths'); +} + +// ensure WORLD_EXECUTABLE is actually executable by all +beforeAll(async () => { + chmodSync(WORLD_EXECUTABLE, 0o777); +}); + +// clean and recreate TMP directory +beforeEach(async () => { + await del(TMP); + await mkdirp(TMP); +}); + +// cleanup TMP directory +afterAll(async () => { + await del(TMP); +}); + +describe('mkdirp()', () => { + it('rejects if path is not absolute', async () => { + try { + await mkdirp('foo/bar'); + throw new Error('Expected mkdirp() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('makes directory and necessary parent directories', async () => { + const destination = resolve(TMP, 'a/b/c/d/e/f/g'); + + expect(await mkdirp(destination)).toBe(undefined); + + expect(statSync(destination).isDirectory()).toBe(true); + }); +}); + +describe('write()', () => { + it('rejects if path is not absolute', async () => { + try { + // @ts-expect-error missing content intentional + await write('foo/bar'); + throw new Error('Expected write() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('writes content to a file with existing parent directory', async () => { + const destination = resolve(TMP, 'a'); + + expect(await write(destination, 'bar')).toBe(undefined); + expect(await read(destination)).toBe('bar'); + }); + + it('writes content to a file with missing parents', async () => { + const destination = resolve(TMP, 'a/b/c/d/e'); + + expect(await write(destination, 'bar')).toBe(undefined); + expect(await read(destination)).toBe('bar'); + }); +}); + +describe('read()', () => { + it('rejects if path is not absolute', async () => { + try { + await read('foo/bar'); + throw new Error('Expected read() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('reads file, resolves with result', async () => { + expect(await read(BAR_TXT_PATH)).toBe('bar\n'); + }); +}); + +describe('getChildPaths()', () => { + it('rejects if path is not absolute', async () => { + try { + await getChildPaths('foo/bar'); + throw new Error('Expected getChildPaths() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('resolves with absolute paths to the 
children of directory', async () => { + const path = resolve(FIXTURES, 'foo_dir'); + expect((await getChildPaths(path)).sort()).toEqual([ + resolve(FIXTURES, 'foo_dir/.bar'), + BAR_TXT_PATH, + resolve(FIXTURES, 'foo_dir/foo'), + ]); + }); + + it('rejects with ENOENT if path does not exist', async () => { + try { + await getChildPaths(resolve(FIXTURES, 'notrealpath')); + throw new Error('Expected getChildPaths() to reject'); + } catch (error) { + expect(error).toHaveProperty('code', 'ENOENT'); + } + }); +}); + +describe('copyAll()', () => { + it('rejects if source path is not absolute', async () => { + try { + await copyAll('foo/bar', __dirname); + throw new Error('Expected copyAll() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if destination path is not absolute', async () => { + try { + await copyAll(__dirname, 'foo/bar'); + throw new Error('Expected copyAll() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if neither path is not absolute', async () => { + try { + await copyAll('foo/bar', 'foo/bar'); + throw new Error('Expected copyAll() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('copies files and directories from source to dest, creating dest if necessary, respecting mode', async () => { + const destination = resolve(TMP, 'a/b/c'); + await copyAll(FIXTURES, destination); + + expect((await getChildPaths(resolve(destination, 'foo_dir'))).sort()).toEqual([ + resolve(destination, 'foo_dir/bar.txt'), + resolve(destination, 'foo_dir/foo'), + ]); + + expect(getCommonMode(resolve(destination, 'bin/world_executable'))).toBe( + isWindows ? '666' : '777' + ); + expect(getCommonMode(resolve(destination, 'foo_dir/bar.txt'))).toBe(isWindows ? 
'666' : '644'); + }); + + it('applies select globs if specified, ignores dot files', async () => { + const destination = resolve(TMP, 'a/b/c/d'); + await copyAll(FIXTURES, destination, { + select: ['**/*bar*'], + }); + + try { + statSync(resolve(destination, 'bin/world_executable')); + throw new Error('expected bin/world_executable to not be copied'); + } catch (error) { + expect(error).toHaveProperty('code', 'ENOENT'); + } + + try { + statSync(resolve(destination, 'foo_dir/.bar')); + throw new Error('expected foo_dir/.bar to not be copied'); + } catch (error) { + expect(error).toHaveProperty('code', 'ENOENT'); + } + + expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n'); + }); + + it('supports select globs and dot option together', async () => { + const destination = resolve(TMP, 'a/b/c/d'); + await copyAll(FIXTURES, destination, { + select: ['**/*bar*'], + dot: true, + }); + + try { + statSync(resolve(destination, 'bin/world_executable')); + throw new Error('expected bin/world_executable to not be copied'); + } catch (error) { + expect(error).toHaveProperty('code', 'ENOENT'); + } + + expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n'); + expect(await read(resolve(destination, 'foo_dir/.bar'))).toBe('dotfile\n'); + }); + + it('supports atime and mtime', async () => { + const destination = resolve(TMP, 'a/b/c/d/e'); + const time = new Date(1425298511000); + await copyAll(FIXTURES, destination, { + time, + }); + const barTxt = statSync(resolve(destination, 'foo_dir/bar.txt')); + const fooDir = statSync(resolve(destination, 'foo_dir')); + + // precision is platform specific + const oneDay = 86400000; + expect(Math.abs(barTxt.atimeMs - time.getTime())).toBeLessThan(oneDay); + expect(Math.abs(fooDir.atimeMs - time.getTime())).toBeLessThan(oneDay); + expect(Math.abs(barTxt.mtimeMs - time.getTime())).toBeLessThan(oneDay); + }); +}); + +describe('getFileHash()', () => { + it('rejects if path is not absolute', async () => { + try { + await getFileHash('foo/bar', 'some content'); + throw new Error('Expected getFileHash() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('resolves with the sha1 hash of a file', async () => { + expect(await getFileHash(BAR_TXT_PATH, 'sha1')).toBe( + 'e242ed3bffccdf271b7fbaf34ed72d089537b42f' + ); + }); + it('resolves with the sha256 hash of a file', async () => { + expect(await getFileHash(BAR_TXT_PATH, 'sha256')).toBe( + '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730' + ); + }); + it('resolves with the md5 hash of a file', async () => { + expect(await getFileHash(BAR_TXT_PATH, 'md5')).toBe('c157a79031e1c40f85931829bc5fc552'); + }); +}); + +describe('untar()', () => { + it('rejects if source path is not absolute', async () => { + try { + await untar('foo/bar', '**/*'); + throw new Error('Expected untar() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if destination path is not absolute', async () => { + try { + await untar(__dirname, '**/*'); + throw new Error('Expected untar() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if neither path is not absolute', async () => { + try { + await untar('foo/bar', '**/*'); + throw new Error('Expected untar() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('extracts tarball from source into destination, creating destination if necessary', async () => { + const destination = resolve(TMP, 'a/b/c/d/e/f'); + await
untar(FOO_TAR_PATH, destination); + expect(await read(resolve(destination, 'foo_dir/bar.txt'))).toBe('bar\n'); + expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).toBe('foo\n'); + }); + + it('passes third argument to Extract class, overriding path with destination', async () => { + const destination = resolve(TMP, 'a/b/c'); + + await untar(FOO_TAR_PATH, destination, { + path: '/dev/null', + strip: 1, + }); + + expect(await read(resolve(destination, 'bar.txt'))).toBe('bar\n'); + expect(await read(resolve(destination, 'foo/foo.txt'))).toBe('foo\n'); + }); +}); + +describe('gunzip()', () => { + it('rejects if source path is not absolute', async () => { + try { + await gunzip('foo/bar', '**/*'); + throw new Error('Expected gunzip() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if destination path is not absolute', async () => { + try { + await gunzip(__dirname, '**/*'); + throw new Error('Expected gunzip() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('rejects if neither path is not absolute', async () => { + try { + await gunzip('foo/bar', '**/*'); + throw new Error('Expected gunzip() to reject'); + } catch (error) { + assertNonAbsoluteError(error); + } + }); + + it('extracts gzip from source into destination, creating destination if necessary', async () => { + const destination = resolve(TMP, 'z/y/x/v/u/t/foo.txt'); + await gunzip(FOO_GZIP_PATH, destination); + expect(await read(resolve(destination))).toBe('foo\n'); + }); +}); diff --git a/src/dev/build/lib/scan_copy.test.ts b/src/dev/build/lib/integration_tests/scan_copy.test.ts similarity index 94% rename from src/dev/build/lib/scan_copy.test.ts rename to src/dev/build/lib/integration_tests/scan_copy.test.ts index ba693770445dc..f81951c575313 100644 --- a/src/dev/build/lib/scan_copy.test.ts +++ b/src/dev/build/lib/integration_tests/scan_copy.test.ts @@ -22,14 +22,13 @@ import { resolve } from 'path'; import del from 'del'; -// @ts-ignore -import { getChildPaths, mkdirp, write } from './fs'; -import { scanCopy } from './scan_copy'; +import { getChildPaths } from '../fs'; +import { scanCopy } from '../scan_copy'; const IS_WINDOWS = process.platform === 'win32'; -const FIXTURES = resolve(__dirname, '__tests__/fixtures'); +const FIXTURES = resolve(__dirname, '../__fixtures__'); +const TMP = resolve(__dirname, '../__tmp__'); const WORLD_EXECUTABLE = resolve(FIXTURES, 'bin/world_executable'); -const TMP = resolve(__dirname, '__tests__/__tmp__'); const getCommonMode = (path: string) => statSync(path).mode.toString(8).slice(-3); diff --git a/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts b/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts new file mode 100644 index 0000000000000..007a3bc631c60 --- /dev/null +++ b/src/dev/build/lib/integration_tests/watch_stdio_for_line.test.ts @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import execa from 'execa'; + +import { watchStdioForLine } from '../watch_stdio_for_line'; + +const onLogLine = jest.fn(); + +beforeEach(() => { + jest.clearAllMocks(); +}); + +it('calls logFn with log lines', async () => { + const proc = execa(process.execPath, ['-e', 'console.log("hi")']); + await watchStdioForLine(proc, onLogLine); + expect(onLogLine.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + "hi", + ], + ] + `); +}); + +it('send the proc SIGKILL if it logs a line matching exitAfter regexp', async function () { + const proc = execa(process.execPath, [require.resolve('../__fixtures__/log_on_sigint')]); + await watchStdioForLine(proc, onLogLine, /listening for SIGINT/); + expect(onLogLine.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + "listening for SIGINT", + ], + ] + `); +}); diff --git a/src/dev/build/lib/platform.js b/src/dev/build/lib/platform.js deleted file mode 100644 index ab2672615e1c5..0000000000000 --- a/src/dev/build/lib/platform.js +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export function createPlatform(name, architecture, buildName) { - return new (class Platform { - getName() { - return name; - } - - getArchitecture() { - return architecture; - } - - getBuildName() { - return buildName; - } - - getNodeArch() { - return `${name}-${architecture}`; - } - - isWindows() { - return name === 'win32'; - } - - isMac() { - return name === 'darwin'; - } - - isLinux() { - return name === 'linux'; - } - })(); -} diff --git a/src/dev/build/lib/platform.test.ts b/src/dev/build/lib/platform.test.ts new file mode 100644 index 0000000000000..a93333c57e75e --- /dev/null +++ b/src/dev/build/lib/platform.test.ts @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Platform } from './platform'; + +describe('getName()', () => { + it('returns the name argument', () => { + expect(new Platform('win32', 'x64', 'foo').getName()).toBe('win32'); + }); +}); + +describe('getNodeArch()', () => { + it('returns the node arch for the passed name', () => { + expect(new Platform('win32', 'x64', 'foo').getNodeArch()).toBe('win32-x64'); + }); +}); + +describe('getBuildName()', () => { + it('returns the build name for the passed name', () => { + expect(new Platform('linux', 'arm64', 'linux-aarch64').getBuildName()).toBe('linux-aarch64'); + }); +}); + +describe('isWindows()', () => { + it('returns true if name is win32', () => { + expect(new Platform('win32', 'x64', 'foo').isWindows()).toBe(true); + expect(new Platform('linux', 'x64', 'foo').isWindows()).toBe(false); + expect(new Platform('darwin', 'x64', 'foo').isWindows()).toBe(false); + }); +}); + +describe('isLinux()', () => { + it('returns true if name is linux', () => { + expect(new Platform('win32', 'x64', 'foo').isLinux()).toBe(false); + expect(new Platform('linux', 'x64', 'foo').isLinux()).toBe(true); + expect(new Platform('darwin', 'x64', 'foo').isLinux()).toBe(false); + }); +}); + +describe('isMac()', () => { + it('returns true if name is darwin', () => { + expect(new Platform('win32', 'x64', 'foo').isMac()).toBe(false); + expect(new Platform('linux', 'x64', 'foo').isMac()).toBe(false); + expect(new Platform('darwin', 'x64', 'foo').isMac()).toBe(true); + }); +}); diff --git a/src/dev/build/lib/platform.ts b/src/dev/build/lib/platform.ts new file mode 100644 index 0000000000000..f42c7eb7fba54 --- /dev/null +++ b/src/dev/build/lib/platform.ts @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export type PlatformName = 'win32' | 'darwin' | 'linux'; +export type PlatformArchitecture = 'x64' | 'arm64'; + +export class Platform { + constructor( + private name: PlatformName, + private architecture: PlatformArchitecture, + private buildName: string + ) {} + + getName() { + return this.name; + } + + getArchitecture() { + return this.architecture; + } + + getBuildName() { + return this.buildName; + } + + getNodeArch() { + return `${this.name}-${this.architecture}`; + } + + isWindows() { + return this.name === 'win32'; + } + + isMac() { + return this.name === 'darwin'; + } + + isLinux() { + return this.name === 'linux'; + } +} + +export const ALL_PLATFORMS = [ + new Platform('linux', 'x64', 'linux-x86_64'), + new Platform('linux', 'arm64', 'linux-aarch64'), + new Platform('darwin', 'x64', 'darwin-x86_64'), + new Platform('win32', 'x64', 'windows-x86_64'), +]; diff --git a/src/dev/build/lib/runner.test.ts b/src/dev/build/lib/runner.test.ts new file mode 100644 index 0000000000000..0e17f2f590e3d --- /dev/null +++ b/src/dev/build/lib/runner.test.ts @@ -0,0 +1,248 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + ToolingLog, + ToolingLogCollectingWriter, + createStripAnsiSerializer, + createRecursiveSerializer, +} from '@kbn/dev-utils'; +import { Config } from './config'; +import { createRunner } from './runner'; +import { Build } from './build'; +import { isErrorLogged, markErrorLogged } from './errors'; + +jest.mock('./version_info'); + +const testWriter = new ToolingLogCollectingWriter(); +const log = new ToolingLog(); +log.setWriters([testWriter]); + +expect.addSnapshotSerializer(createStripAnsiSerializer()); + +const STACK_TRACE = /(\│\s+)at .+ \(.+\)$/; +const isStackTrace = (x: any) => typeof x === 'string' && STACK_TRACE.test(x); + +expect.addSnapshotSerializer( + createRecursiveSerializer( + (v) => Array.isArray(v) && v.some(isStackTrace), + (v) => { + const start = v.findIndex(isStackTrace); + v[start] = v[start].replace(STACK_TRACE, '$1'); + while (isStackTrace(v[start + 1])) v.splice(start + 1, 1); + return v; + } + ) +); + +beforeEach(() => { + testWriter.messages.length = 0; + jest.clearAllMocks(); +}); + +const setup = async (opts: { buildDefaultDist: boolean; buildOssDist: boolean }) => { + const config = await Config.create({ + isRelease: true, + targetAllPlatforms: true, + versionQualifier: '-SNAPSHOT', + }); + + const run = createRunner({ + config, + log, + ...opts, + }); + + return { config, run }; +}; + +describe('buildOssDist = true, buildDefaultDist = true', () => { + it('runs global task once, passing config and log', async () => { + const { config, run } = await setup({ + buildDefaultDist: true, + buildOssDist: true, + }); + + const mock = jest.fn(); + + await run({ + global: true, + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(1); + expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build), expect.any(Build)]); + }); + + it('calls local tasks twice, passing each build', async () => { + const { config, run } = await setup({ + buildDefaultDist: true, + buildOssDist: true, + }); + + const mock = jest.fn(); + + await run({ + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(2); + expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build)); + }); +}); + +describe('just default dist', () => { + it('runs global task once, passing config and log', async () => { + const { config, run } = await setup({ + buildDefaultDist: true, + buildOssDist: false, + }); + + const mock = jest.fn(); + + await run({ + global: true, + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(1); + expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]); + }); + + it('calls local tasks once, passing the default build', async () => { + const { config, run } = await setup({ + buildDefaultDist: true, + buildOssDist: false, + }); + + const mock = jest.fn(); + + await run({ + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(1); + expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build)); + const [args] = mock.mock.calls; + const [, , build] = args; + if (build.isOss()) { + throw new Error('expected build to be the default dist, not the oss dist'); + } + }); +}); + +describe('just oss dist', () => { + it('runs global task once, passing config and log', async () => { + const { config, run } = await setup({ + buildDefaultDist: false, + buildOssDist: true, + }); + + const mock = jest.fn(); + + await run({ + global: true, + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(1); + 
expect(mock).toHaveBeenLastCalledWith(config, log, [expect.any(Build)]); + }); + + it('calls local tasks once, passing the oss build', async () => { + const { config, run } = await setup({ + buildDefaultDist: false, + buildOssDist: true, + }); + + const mock = jest.fn(); + + await run({ + description: 'foo', + run: mock, + }); + + expect(mock).toHaveBeenCalledTimes(1); + expect(mock).toHaveBeenCalledWith(config, log, expect.any(Build)); + const [args] = mock.mock.calls; + const [, , build] = args; + if (!build.isOss()) { + throw new Error('expected build to be the oss dist, not the default dist'); + } + }); +}); + +describe('task rejection', () => { + it('rejects, logs error, and marks error logged', async () => { + const { run } = await setup({ + buildDefaultDist: true, + buildOssDist: false, + }); + + const error = new Error('FOO'); + expect(isErrorLogged(error)).toBe(false); + + const promise = run({ + description: 'foo', + async run() { + throw error; + }, + }); + + await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`); + expect(testWriter.messages).toMatchInlineSnapshot(` + Array [ + " info [ kibana ] foo", + " │ERROR failure 0 sec", + " │ERROR Error: FOO", + " │ ", + "", + ] + `); + expect(isErrorLogged(error)).toBe(true); + }); + + it('just rethrows errors that have already been logged', async () => { + const { run } = await setup({ + buildDefaultDist: true, + buildOssDist: false, + }); + + const error = markErrorLogged(new Error('FOO')); + const promise = run({ + description: 'foo', + async run() { + throw error; + }, + }); + + await expect(promise).rejects.toThrowErrorMatchingInlineSnapshot(`"FOO"`); + expect(testWriter.messages).toMatchInlineSnapshot(` + Array [ + " info [ kibana ] foo", + "", + ] + `); + }); +}); diff --git a/src/dev/build/lib/runner.js b/src/dev/build/lib/runner.ts similarity index 72% rename from src/dev/build/lib/runner.js rename to src/dev/build/lib/runner.ts index 363cfbe97afad..6b7d175bb229a 100644 --- a/src/dev/build/lib/runner.js +++ b/src/dev/build/lib/runner.ts @@ -18,13 +18,33 @@ */ import chalk from 'chalk'; +import { ToolingLog } from '@kbn/dev-utils'; import { isErrorLogged, markErrorLogged } from './errors'; +import { Build } from './build'; +import { Config } from './config'; -import { createBuild } from './build'; +interface Options { + config: Config; + log: ToolingLog; + buildOssDist: boolean; + buildDefaultDist: boolean; +} + +export interface GlobalTask { + global: true; + description: string; + run(config: Config, log: ToolingLog, builds: Build[]): Promise<void>; +} + +export interface Task { + global?: false; + description: string; + run(config: Config, log: ToolingLog, build: Build): Promise<void>; +} -export function createRunner({ config, log, buildOssDist, buildDefaultDist }) { - async function execTask(desc, task, ...args) { +export function createRunner({ config, log, buildOssDist, buildDefaultDist }: Options) { + async function execTask(desc: string, task: Task | GlobalTask, lastArg: any) { log.info(desc); log.indent(4); @@ -37,11 +57,11 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) { }; try { - await task.run(config, log, ...args); + await task.run(config, log, lastArg); log.success(chalk.green('✓'), time()); } catch (error) { if (!isErrorLogged(error)) { - log.error('failure', time()); + log.error(`failure ${time()}`); log.error(error); markErrorLogged(error); } @@ -53,22 +73,12 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) { } } - const builds = []; +
const builds: Build[] = []; if (buildDefaultDist) { - builds.push( - createBuild({ - config, - oss: false, - }) - ); + builds.push(new Build(config, false)); } if (buildOssDist) { - builds.push( - createBuild({ - config, - oss: true, - }) - ); + builds.push(new Build(config, true)); } /** @@ -76,11 +86,8 @@ export function createRunner({ config, log, buildOssDist, buildDefaultDist }) { * `config`: an object with methods for determining top-level config values, see `./config.js` * `log`: an instance of the `ToolingLog`, see `../../tooling_log/tooling_log.js` * `builds?`: If task does is not defined as `global: true` then it is called for each build and passed each one here. - * - * @param {Task} task - * @return {Promise} */ - return async function run(task) { + return async function run(task: Task | GlobalTask) { if (task.global) { await execTask(chalk`{dim [ global ]} ${task.description}`, task, builds); } else { diff --git a/src/dev/build/lib/version_info.test.ts b/src/dev/build/lib/version_info.test.ts new file mode 100644 index 0000000000000..1b0c71bf9220e --- /dev/null +++ b/src/dev/build/lib/version_info.test.ts @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import pkg from '../../../../package.json'; +import { getVersionInfo } from './version_info'; + +describe('isRelease = true', () => { + it('returns unchanged package.version, build sha, and build number', async () => { + const versionInfo = await getVersionInfo({ + isRelease: true, + pkg, + }); + + expect(versionInfo).toHaveProperty('buildVersion', pkg.version); + expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/)); + expect(versionInfo).toHaveProperty('buildNumber'); + expect(versionInfo.buildNumber).toBeGreaterThan(1000); + }); +}); + +describe('isRelease = false', () => { + it('returns snapshot version, build sha, and build number', async () => { + const versionInfo = await getVersionInfo({ + isRelease: false, + pkg, + }); + + expect(versionInfo).toHaveProperty('buildVersion', expect.stringContaining(pkg.version)); + expect(versionInfo).toHaveProperty('buildVersion', expect.stringMatching(/-SNAPSHOT$/)); + expect(versionInfo).toHaveProperty('buildSha', expect.stringMatching(/^[0-9a-f]{40}$/)); + expect(versionInfo).toHaveProperty('buildNumber'); + expect(versionInfo.buildNumber).toBeGreaterThan(1000); + }); +}); + +describe('versionQualifier', () => { + it('appends a version qualifier', async () => { + const versionInfo = await getVersionInfo({ + isRelease: true, + versionQualifier: 'beta55', + pkg, + }); + + expect(versionInfo).toHaveProperty('buildVersion', pkg.version + '-beta55'); + }); +}); diff --git a/src/dev/build/lib/version_info.js b/src/dev/build/lib/version_info.ts similarity index 84% rename from src/dev/build/lib/version_info.js rename to src/dev/build/lib/version_info.ts index 3a053afdbff8b..958112c524bac 100644 --- a/src/dev/build/lib/version_info.js +++ b/src/dev/build/lib/version_info.ts @@ -34,7 +34,19 @@ async function getBuildNumber() { return parseFloat(wc.stdout.trim()); } -export async function getVersionInfo({ isRelease, versionQualifier, pkg }) { +interface Options { + isRelease: boolean; + versionQualifier?: string; + pkg: { + version: string; + }; +} + +type ResolvedType<T extends Promise<any>> = T extends Promise<infer X> ? X : never; + +export type VersionInfo = ResolvedType<ReturnType<typeof getVersionInfo>>; + +export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Options) { const buildVersion = pkg.version.concat( versionQualifier ? `-${versionQualifier}` : '', isRelease ?
'' : '-SNAPSHOT' diff --git a/src/legacy/utils/watch_stdio_for_line.js b/src/dev/build/lib/watch_stdio_for_line.ts similarity index 83% rename from src/legacy/utils/watch_stdio_for_line.js rename to src/dev/build/lib/watch_stdio_for_line.ts index 01323b4d4e967..2322d017abc61 100644 --- a/src/legacy/utils/watch_stdio_for_line.js +++ b/src/dev/build/lib/watch_stdio_for_line.ts @@ -18,8 +18,13 @@ */ import { Transform } from 'stream'; +import { ExecaChildProcess } from 'execa'; -import { createPromiseFromStreams, createSplitStream, createMapStream } from './streams'; +import { + createPromiseFromStreams, + createSplitStream, + createMapStream, +} from '../../../legacy/utils/streams'; // creates a stream that skips empty lines unless they are followed by // another line, preventing the empty lines produced by splitStream @@ -27,7 +32,7 @@ function skipLastEmptyLineStream() { let skippedEmptyLine = false; return new Transform({ objectMode: true, - transform(line, enc, cb) { + transform(line, _, cb) { if (skippedEmptyLine) { this.push(''); skippedEmptyLine = false; @@ -37,14 +42,18 @@ function skipLastEmptyLineStream() { skippedEmptyLine = true; return cb(); } else { - return cb(null, line); + return cb(undefined, line); } }, }); } -export async function watchStdioForLine(proc, logFn, exitAfter) { - function onLogLine(line) { +export async function watchStdioForLine( + proc: ExecaChildProcess, + logFn: (line: string) => void, + exitAfter?: RegExp +) { + function onLogLine(line: string) { logFn(line); if (exitAfter && exitAfter.test(line)) { diff --git a/src/dev/build/tasks/bin/copy_bin_scripts_task.js b/src/dev/build/tasks/bin/copy_bin_scripts_task.ts similarity index 92% rename from src/dev/build/tasks/bin/copy_bin_scripts_task.js rename to src/dev/build/tasks/bin/copy_bin_scripts_task.ts index f620f12b17d88..d0ef0a58eebd5 100644 --- a/src/dev/build/tasks/bin/copy_bin_scripts_task.js +++ b/src/dev/build/tasks/bin/copy_bin_scripts_task.ts @@ -17,9 +17,9 @@ * under the License. */ -import { copyAll } from '../../lib'; +import { copyAll, Task } from '../../lib'; -export const CopyBinScriptsTask = { +export const CopyBinScripts: Task = { description: 'Copying bin scripts into platform-generic build directory', async run(config, log, build) { diff --git a/src/dev/build/tasks/os_packages/docker_generator/index.js b/src/dev/build/tasks/bin/index.ts similarity index 95% rename from src/dev/build/tasks/os_packages/docker_generator/index.js rename to src/dev/build/tasks/bin/index.ts index 9e0bbf51f9a56..dc30f626decc4 100644 --- a/src/dev/build/tasks/os_packages/docker_generator/index.js +++ b/src/dev/build/tasks/bin/index.ts @@ -17,4 +17,4 @@ * under the License. 
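`watchStdioForLine` now lives next to the build tasks and gains types: an execa child process, a per-line log callback, and an optional `exitAfter` pattern that stops the watch early. A hypothetical usage sketch (the script path and the "done" pattern below are made up for illustration):

```ts
import execa from 'execa';
import { ToolingLog } from '@kbn/dev-utils';

import { watchStdioForLine } from './watch_stdio_for_line';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });

export async function runBuildStep() {
  // Spawn a child process and forward each stdout/stderr line to the log.
  const proc = execa('node', ['scripts/some_build_step.js']);

  // Resolves once the stdio streams finish; the optional RegExp lets callers
  // stop watching as soon as a matching line appears.
  await watchStdioForLine(proc, (line) => log.info(line), /build complete/);
}
```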
*/ -export * from './run'; +export * from './copy_bin_scripts_task'; diff --git a/src/dev/build/tasks/build_kibana_platform_plugins.js b/src/dev/build/tasks/build_kibana_platform_plugins.ts similarity index 92% rename from src/dev/build/tasks/build_kibana_platform_plugins.js rename to src/dev/build/tasks/build_kibana_platform_plugins.ts index 153a3120f896f..08637677fcfbe 100644 --- a/src/dev/build/tasks/build_kibana_platform_plugins.js +++ b/src/dev/build/tasks/build_kibana_platform_plugins.ts @@ -25,9 +25,11 @@ import { reportOptimizerStats, } from '@kbn/optimizer'; -export const BuildKibanaPlatformPluginsTask = { +import { Task } from '../lib'; + +export const BuildKibanaPlatformPlugins: Task = { description: 'Building distributable versions of Kibana platform plugins', - async run(_, log, build) { + async run(config, log, build) { const optimizerConfig = OptimizerConfig.create({ repoRoot: build.resolvePath(), cache: false, diff --git a/src/dev/build/tasks/build_packages_task.js b/src/dev/build/tasks/build_packages_task.ts similarity index 97% rename from src/dev/build/tasks/build_packages_task.js rename to src/dev/build/tasks/build_packages_task.ts index b31855aa42dac..dd4e88f9c2b74 100644 --- a/src/dev/build/tasks/build_packages_task.js +++ b/src/dev/build/tasks/build_packages_task.ts @@ -18,7 +18,8 @@ */ import { buildProductionProjects } from '@kbn/pm'; -import { mkdirp } from '../lib'; + +import { mkdirp, Task } from '../lib'; /** * High-level overview of how we enable shared packages in production: @@ -66,8 +67,7 @@ import { mkdirp } from '../lib'; * in some way by Kibana itself in production, as it won't otherwise be * included in the production build. */ - -export const BuildPackagesTask = { +export const BuildPackages: Task = { description: 'Building distributable versions of packages', async run(config, log, build) { await mkdirp(config.resolveFromRepo('target')); diff --git a/src/dev/build/tasks/clean_tasks.js b/src/dev/build/tasks/clean_tasks.ts similarity index 92% rename from src/dev/build/tasks/clean_tasks.js rename to src/dev/build/tasks/clean_tasks.ts index ff5c3b3a73dd3..b519b17e591a3 100644 --- a/src/dev/build/tasks/clean_tasks.js +++ b/src/dev/build/tasks/clean_tasks.ts @@ -19,9 +19,9 @@ import minimatch from 'minimatch'; -import { deleteAll, deleteEmptyFolders, scanDelete } from '../lib'; +import { deleteAll, deleteEmptyFolders, scanDelete, Task, GlobalTask } from '../lib'; -export const CleanTask = { +export const Clean: GlobalTask = { global: true, description: 'Cleaning artifacts from previous builds', @@ -37,7 +37,7 @@ export const CleanTask = { }, }; -export const CleanPackagesTask = { +export const CleanPackages: Task = { description: 'Cleaning source for packages that are now installed in node_modules', async run(config, log, build) { @@ -45,7 +45,7 @@ export const CleanPackagesTask = { }, }; -export const CleanTypescriptTask = { +export const CleanTypescript: Task = { description: 'Cleaning typescript source files that have been transpiled to JS', async run(config, log, build) { @@ -59,11 +59,11 @@ export const CleanTypescriptTask = { }, }; -export const CleanExtraFilesFromModulesTask = { +export const CleanExtraFilesFromModules: Task = { description: 'Cleaning tests, examples, docs, etc. 
from node_modules', async run(config, log, build) { - const makeRegexps = (patterns) => + const makeRegexps = (patterns: string[]) => patterns.map((pattern) => minimatch.makeRe(pattern, { nocase: true })); const regularExpressions = makeRegexps([ @@ -181,7 +181,7 @@ export const CleanExtraFilesFromModulesTask = { }, }; -export const CleanExtraBinScriptsTask = { +export const CleanExtraBinScripts: Task = { description: 'Cleaning extra bin/* scripts from platform-specific builds', async run(config, log, build) { @@ -201,7 +201,7 @@ export const CleanExtraBinScriptsTask = { }, }; -export const CleanEmptyFoldersTask = { +export const CleanEmptyFolders: Task = { description: 'Cleaning all empty folders recursively', async run(config, log, build) { diff --git a/src/dev/build/tasks/copy_source_task.js b/src/dev/build/tasks/copy_source_task.ts similarity index 95% rename from src/dev/build/tasks/copy_source_task.js rename to src/dev/build/tasks/copy_source_task.ts index 52809449ba338..221c9162bd2a9 100644 --- a/src/dev/build/tasks/copy_source_task.js +++ b/src/dev/build/tasks/copy_source_task.ts @@ -17,9 +17,9 @@ * under the License. */ -import { copyAll } from '../lib'; +import { copyAll, Task } from '../lib'; -export const CopySourceTask = { +export const CopySource: Task = { description: 'Copying source into platform-generic build directory', async run(config, log, build) { diff --git a/src/dev/build/tasks/create_archives_sources_task.js b/src/dev/build/tasks/create_archives_sources_task.ts similarity index 95% rename from src/dev/build/tasks/create_archives_sources_task.js rename to src/dev/build/tasks/create_archives_sources_task.ts index 76f08bd3d2e4f..72f875b431933 100644 --- a/src/dev/build/tasks/create_archives_sources_task.js +++ b/src/dev/build/tasks/create_archives_sources_task.ts @@ -17,10 +17,10 @@ * under the License. 
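For reference, the `makeRegexps` helper typed above is a thin wrapper around `minimatch.makeRe`; a small standalone illustration (the pattern is arbitrary):

```ts
import minimatch from 'minimatch';

// Convert glob patterns into case-insensitive RegExps, as CleanExtraFilesFromModules does.
const makeRegexps = (patterns: string[]) =>
  patterns.map((pattern) => minimatch.makeRe(pattern, { nocase: true }));

const [markdownRe] = makeRegexps(['*.md']);
console.log(markdownRe.test('README.MD')); // true, thanks to nocase
```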
*/ -import { scanCopy } from '../lib'; +import { scanCopy, Task } from '../lib'; import { getNodeDownloadInfo } from './nodejs'; -export const CreateArchivesSourcesTask = { +export const CreateArchivesSources: Task = { description: 'Creating platform-specific archive source directories', async run(config, log, build) { await Promise.all( diff --git a/src/dev/build/tasks/create_archives_task.js b/src/dev/build/tasks/create_archives_task.ts similarity index 80% rename from src/dev/build/tasks/create_archives_task.js rename to src/dev/build/tasks/create_archives_task.ts index 541b9551dbc9b..3ffb1afef7469 100644 --- a/src/dev/build/tasks/create_archives_task.js +++ b/src/dev/build/tasks/create_archives_task.ts @@ -23,11 +23,11 @@ import { promisify } from 'util'; import { CiStatsReporter } from '@kbn/dev-utils'; -import { mkdirp, compress } from '../lib'; +import { mkdirp, compressTar, compressZip, Task } from '../lib'; const asyncStat = promisify(Fs.stat); -export const CreateArchivesTask = { +export const CreateArchives: Task = { description: 'Creating the archives for each platform', async run(config, log, build) { @@ -49,19 +49,16 @@ export const CreateArchivesTask = { path: destination, }); - await compress( - 'zip', - { - archiverOptions: { - zlib: { - level: 9, - }, + await compressZip({ + source, + destination, + archiverOptions: { + zlib: { + level: 9, }, - createRootDirectory: true, }, - source, - destination - ); + createRootDirectory: true, + }); break; case '.gz': @@ -70,20 +67,17 @@ export const CreateArchivesTask = { path: destination, }); - await compress( - 'tar', - { - archiverOptions: { - gzip: true, - gzipOptions: { - level: 9, - }, + await compressTar({ + source, + destination, + archiverOptions: { + gzip: true, + gzipOptions: { + level: 9, }, - createRootDirectory: true, }, - source, - destination - ); + createRootDirectory: true, + }); break; default: diff --git a/src/dev/build/tasks/create_empty_dirs_and_files_task.js b/src/dev/build/tasks/create_empty_dirs_and_files_task.ts similarity index 92% rename from src/dev/build/tasks/create_empty_dirs_and_files_task.js rename to src/dev/build/tasks/create_empty_dirs_and_files_task.ts index 6bf059ca9519b..a72c6a4598338 100644 --- a/src/dev/build/tasks/create_empty_dirs_and_files_task.js +++ b/src/dev/build/tasks/create_empty_dirs_and_files_task.ts @@ -17,9 +17,9 @@ * under the License. 
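Splitting `compress('zip' | 'tar', …)` into `compressZip`/`compressTar` lets each call site pass a single typed options object. The shapes below are inferred from the two call sites in this task; the real declarations in `src/dev/build/lib` are not shown in this diff and may differ.

```ts
// Inferred option shapes, not the actual lib declarations.
interface CompressZipOptions {
  source: string;
  destination: string;
  archiverOptions?: { zlib?: { level?: number } };
  createRootDirectory: boolean;
}

interface CompressTarOptions {
  source: string;
  destination: string;
  archiverOptions?: { gzip?: boolean; gzipOptions?: { level?: number } };
  createRootDirectory: boolean;
}

declare function compressZip(options: CompressZipOptions): Promise<void>;
declare function compressTar(options: CompressTarOptions): Promise<void>;
```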
*/ -import { mkdirp, write } from '../lib'; +import { mkdirp, write, Task } from '../lib'; -export const CreateEmptyDirsAndFilesTask = { +export const CreateEmptyDirsAndFiles: Task = { description: 'Creating some empty directories and files to prevent file-permission issues', async run(config, log, build) { diff --git a/src/dev/build/tasks/create_package_json_task.js b/src/dev/build/tasks/create_package_json_task.ts similarity index 92% rename from src/dev/build/tasks/create_package_json_task.js rename to src/dev/build/tasks/create_package_json_task.ts index e7a410b4c6350..5d7fdb9eae2f0 100644 --- a/src/dev/build/tasks/create_package_json_task.js +++ b/src/dev/build/tasks/create_package_json_task.ts @@ -19,9 +19,9 @@ import { copyWorkspacePackages } from '@kbn/pm'; -import { read, write } from '../lib'; +import { read, write, Task } from '../lib'; -export const CreatePackageJsonTask = { +export const CreatePackageJson: Task = { description: 'Creating build-ready version of package.json', async run(config, log, build) { @@ -38,7 +38,7 @@ export const CreatePackageJsonTask = { number: config.getBuildNumber(), sha: config.getBuildSha(), distributable: true, - release: config.isRelease(), + release: config.isRelease, }, repository: pkg.repository, engines: { @@ -59,7 +59,7 @@ export const CreatePackageJsonTask = { }, }; -export const RemovePackageJsonDepsTask = { +export const RemovePackageJsonDeps: Task = { description: 'Removing dependencies from package.json', async run(config, log, build) { @@ -74,7 +74,7 @@ export const RemovePackageJsonDepsTask = { }, }; -export const RemoveWorkspacesTask = { +export const RemoveWorkspaces: Task = { description: 'Remove workspace artifacts', async run(config, log, build) { diff --git a/src/dev/build/tasks/create_readme_task.js b/src/dev/build/tasks/create_readme_task.ts similarity index 93% rename from src/dev/build/tasks/create_readme_task.js rename to src/dev/build/tasks/create_readme_task.ts index 8d60dad9b5633..379ca45f43e26 100644 --- a/src/dev/build/tasks/create_readme_task.js +++ b/src/dev/build/tasks/create_readme_task.ts @@ -17,9 +17,9 @@ * under the License. 
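Note the change from `config.isRelease()` to `config.isRelease` above: the typed `Config` exposes it as a property. The metadata block that `CreatePackageJson` writes into the distributable's package.json ends up looking roughly like this (values are illustrative only):

```ts
// Illustrative values — the real ones come from Config at build time.
const buildMetadata = {
  build: {
    number: 34567, // config.getBuildNumber()
    sha: '0123456789abcdef0123456789abcdef01234567', // config.getBuildSha()
    distributable: true,
    release: false, // config.isRelease (property, not a method)
  },
};
```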
*/ -import { write, read } from '../lib'; +import { write, read, Task } from '../lib'; -export const CreateReadmeTask = { +export const CreateReadme: Task = { description: 'Creating README.md file', async run(config, log, build) { diff --git a/src/dev/build/tasks/index.js b/src/dev/build/tasks/index.ts similarity index 92% rename from src/dev/build/tasks/index.js rename to src/dev/build/tasks/index.ts index 0a3a67313d6a4..4c00e56faee6b 100644 --- a/src/dev/build/tasks/index.js +++ b/src/dev/build/tasks/index.ts @@ -27,7 +27,6 @@ export * from './create_archives_task'; export * from './create_empty_dirs_and_files_task'; export * from './create_package_json_task'; export * from './create_readme_task'; -export * from './install_chromium'; export * from './install_dependencies_task'; export * from './license_file_task'; export * from './nodejs'; @@ -41,3 +40,6 @@ export * from './transpile_scss_task'; export * from './uuid_verification_task'; export * from './verify_env_task'; export * from './write_sha_sums_task'; + +// @ts-expect-error this module can't be TS because it ends up pulling x-pack into Kibana +export { InstallChromium } from './install_chromium'; diff --git a/src/dev/build/tasks/install_chromium.js b/src/dev/build/tasks/install_chromium.js index c5878b23d43ae..3ae36d1615ccd 100644 --- a/src/dev/build/tasks/install_chromium.js +++ b/src/dev/build/tasks/install_chromium.js @@ -17,11 +17,12 @@ * under the License. */ +import { first } from 'rxjs/operators'; + // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install'; -import { first } from 'rxjs/operators'; -export const InstallChromiumTask = { +export const InstallChromium = { description: 'Installing Chromium', async run(config, log, build) { @@ -32,6 +33,7 @@ export const InstallChromiumTask = { log.info(`Installing Chromium for ${platform.getName()}-${platform.getArchitecture()}`); const { binaryPath$ } = installBrowser( + // TODO: https://github.com/elastic/kibana/issues/72496 log, build.resolvePathForPlatform(platform, 'x-pack/plugins/reporting/chromium'), platform.getName(), diff --git a/src/dev/build/tasks/install_dependencies_task.js b/src/dev/build/tasks/install_dependencies_task.ts similarity index 94% rename from src/dev/build/tasks/install_dependencies_task.js rename to src/dev/build/tasks/install_dependencies_task.ts index 5191899cd94d0..32fd23859456e 100644 --- a/src/dev/build/tasks/install_dependencies_task.js +++ b/src/dev/build/tasks/install_dependencies_task.ts @@ -19,7 +19,9 @@ import { Project } from '@kbn/pm'; -export const InstallDependenciesTask = { +import { Task } from '../lib'; + +export const InstallDependencies: Task = { description: 'Installing node_modules, including production builds of packages', async run(config, log, build) { diff --git a/src/dev/build/tasks/license_file_task.js b/src/dev/build/tasks/license_file_task.ts similarity index 94% rename from src/dev/build/tasks/license_file_task.js rename to src/dev/build/tasks/license_file_task.ts index 1a7c70738aa47..f1b65501d076f 100644 --- a/src/dev/build/tasks/license_file_task.js +++ b/src/dev/build/tasks/license_file_task.ts @@ -17,9 +17,9 @@ * under the License. 
*/ -import { write, read } from '../lib'; +import { write, read, Task } from '../lib'; -export const UpdateLicenseFileTask = { +export const UpdateLicenseFile: Task = { description: 'Updating LICENSE.txt file', async run(config, log, build) { diff --git a/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js deleted file mode 100644 index c1764d06b43b3..0000000000000 --- a/src/dev/build/tasks/nodejs/__tests__/download_node_builds_task.js +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import * as NodeShasumsNS from '../node_shasums'; -import * as NodeDownloadInfoNS from '../node_download_info'; -import * as DownloadNS from '../../../lib/download'; // sinon can't stub '../../../lib' properly -import { DownloadNodeBuildsTask } from '../download_node_builds_task'; - -describe('src/dev/build/tasks/nodejs/download_node_builds_task', () => { - const sandbox = sinon.createSandbox(); - afterEach(() => { - sandbox.restore(); - }); - - function setup({ failOnUrl } = {}) { - const platforms = [{ getName: () => 'foo' }, { getName: () => 'bar' }]; - - const log = {}; - const config = { - getNodePlatforms: () => platforms, - getNodeVersion: () => 'nodeVersion', - }; - - sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => { - return { - url: `${platform.getName()}:url`, - downloadPath: `${platform.getName()}:downloadPath`, - downloadName: `${platform.getName()}:downloadName`, - }; - }); - - sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns({ - 'foo:downloadName': 'foo:sha256', - 'bar:downloadName': 'bar:sha256', - }); - - sandbox.stub(DownloadNS, 'download').callsFake(({ url }) => { - if (url === failOnUrl) { - throw new Error('Download failed for reasons'); - } - }); - - return { log, config }; - } - - it('downloads node builds for each platform', async () => { - const { log, config } = setup(); - - await DownloadNodeBuildsTask.run(config, log); - - sinon.assert.calledTwice(DownloadNS.download); - sinon.assert.calledWithExactly(DownloadNS.download, { - log, - url: 'foo:url', - sha256: 'foo:sha256', - destination: 'foo:downloadPath', - retries: 3, - }); - sinon.assert.calledWithExactly(DownloadNS.download, { - log, - url: 'bar:url', - sha256: 'bar:sha256', - destination: 'bar:downloadPath', - retries: 3, - }); - }); - - it('rejects if any download fails', async () => { - const { config, log } = setup({ failOnUrl: 'foo:url' }); - - try { - await DownloadNodeBuildsTask.run(config, log); - throw new Error('Expected DownloadNodeBuildsTask to reject'); - } catch (error) { - expect(error).to.have.property('message').be('Download failed for 
reasons'); - } - }); -}); diff --git a/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js deleted file mode 100644 index efb7aaa3a2209..0000000000000 --- a/src/dev/build/tasks/nodejs/__tests__/extract_node_builds_task.js +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import { resolve } from 'path'; -import * as NodeDownloadInfoNS from '../node_download_info'; -import * as FsNS from '../../../lib/fs'; -import { ExtractNodeBuildsTask } from '../extract_node_builds_task'; - -describe('src/dev/build/tasks/node_extract_node_builds_task', () => { - const sandbox = sinon.createSandbox(); - afterEach(() => { - sandbox.restore(); - }); - - it('copies downloadPath to extractDir/node.exe for windows platform', async () => { - sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({ - downloadPath: 'downloadPath', - extractDir: 'extractDir', - }); - - sandbox.stub(ExtractNodeBuildsTask, 'copyWindows'); - sandbox.stub(FsNS, 'untar'); - - const platform = { - isWindows: () => true, - }; - - const config = { - getNodePlatforms: () => [platform], - }; - - await ExtractNodeBuildsTask.run(config); - - sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo); - sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform); - - sinon.assert.calledOnce(ExtractNodeBuildsTask.copyWindows); - sinon.assert.calledWithExactly( - ExtractNodeBuildsTask.copyWindows, - 'downloadPath', - resolve('extractDir/node.exe') - ); - - sinon.assert.notCalled(FsNS.untar); - }); - - it('untars downloadPath to extractDir, stripping the top level of the archive, for non-windows platforms', async () => { - sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').returns({ - downloadPath: 'downloadPath', - extractDir: 'extractDir', - }); - - sandbox.stub(ExtractNodeBuildsTask, 'copyWindows'); - sandbox.stub(FsNS, 'untar'); - - const platform = { - isWindows: () => false, - }; - - const config = { - getNodePlatforms: () => [platform], - }; - - await ExtractNodeBuildsTask.run(config); - - sinon.assert.calledOnce(NodeDownloadInfoNS.getNodeDownloadInfo); - sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platform); - - sinon.assert.notCalled(ExtractNodeBuildsTask.copyWindows); - - sinon.assert.calledOnce(FsNS.untar); - sinon.assert.calledWithExactly(FsNS.untar, 'downloadPath', 'extractDir', { - strip: 1, - }); - }); -}); diff --git a/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js b/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js deleted file mode 100644 index a8f732a869d2d..0000000000000 --- 
a/src/dev/build/tasks/nodejs/__tests__/verify_existing_node_builds_task.js +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import * as NodeShasumsNS from '../node_shasums'; -import * as NodeDownloadInfoNS from '../node_download_info'; -import * as FsNS from '../../../lib/fs'; -import { VerifyExistingNodeBuildsTask } from '../verify_existing_node_builds_task'; - -describe('src/dev/build/tasks/nodejs/verify_existing_node_builds_task', () => { - const sandbox = sinon.createSandbox(); - afterEach(() => { - sandbox.restore(); - }); - - function setup({ nodeShasums } = {}) { - const platforms = [ - { getName: () => 'foo', getNodeArch: () => 'foo:nodeArch' }, - { getName: () => 'bar', getNodeArch: () => 'bar:nodeArch' }, - ]; - - const log = { success: sinon.stub() }; - const config = { - getNodePlatforms: () => platforms, - getNodeVersion: () => 'nodeVersion', - }; - - sandbox.stub(NodeDownloadInfoNS, 'getNodeDownloadInfo').callsFake((config, platform) => { - return { - url: `${platform.getName()}:url`, - downloadPath: `${platform.getName()}:downloadPath`, - downloadName: `${platform.getName()}:downloadName`, - }; - }); - - sandbox.stub(NodeShasumsNS, 'getNodeShasums').returns( - nodeShasums || { - 'foo:downloadName': 'foo:sha256', - 'bar:downloadName': 'bar:sha256', - } - ); - - sandbox.stub(FsNS, 'getFileHash').callsFake((path) => { - switch (path) { - case 'foo:downloadPath': - return 'foo:sha256'; - case 'bar:downloadPath': - return 'bar:sha256'; - } - }); - - return { log, config, platforms }; - } - - it('downloads node builds for each platform', async () => { - const { log, config, platforms } = setup(); - - await VerifyExistingNodeBuildsTask.run(config, log); - - sinon.assert.calledOnce(NodeShasumsNS.getNodeShasums); - - sinon.assert.calledTwice(NodeDownloadInfoNS.getNodeDownloadInfo); - sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[0]); - sinon.assert.calledWithExactly(NodeDownloadInfoNS.getNodeDownloadInfo, config, platforms[1]); - - sinon.assert.calledTwice(FsNS.getFileHash); - sinon.assert.calledWithExactly(FsNS.getFileHash, 'foo:downloadPath', 'sha256'); - sinon.assert.calledWithExactly(FsNS.getFileHash, 'bar:downloadPath', 'sha256'); - }); - - it('rejects if any download has an incorrect sha256', async () => { - const { config, log } = setup({ - nodeShasums: { - 'foo:downloadName': 'foo:sha256', - 'bar:downloadName': 'bar:invalid', - }, - }); - - try { - await VerifyExistingNodeBuildsTask.run(config, log); - throw new Error('Expected VerifyExistingNodeBuildsTask to reject'); - } catch (error) { - expect(error) - .to.have.property('message') - .be('Download at bar:downloadPath does not match 
expected checksum bar:sha256'); - } - }); -}); diff --git a/src/dev/build/tasks/nodejs/clean_node_builds_task.js b/src/dev/build/tasks/nodejs/clean_node_builds_task.ts similarity index 93% rename from src/dev/build/tasks/nodejs/clean_node_builds_task.js rename to src/dev/build/tasks/nodejs/clean_node_builds_task.ts index a34e65a394115..9deeb9f73de28 100644 --- a/src/dev/build/tasks/nodejs/clean_node_builds_task.js +++ b/src/dev/build/tasks/nodejs/clean_node_builds_task.ts @@ -17,9 +17,9 @@ * under the License. */ -import { deleteAll } from '../../lib'; +import { deleteAll, Task } from '../../lib'; -export const CleanNodeBuildsTask = { +export const CleanNodeBuilds: Task = { description: 'Cleaning npm from node', async run(config, log, build) { diff --git a/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts new file mode 100644 index 0000000000000..6f08c8aa69750 --- /dev/null +++ b/src/dev/build/tasks/nodejs/download_node_builds_task.test.ts @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + ToolingLog, + ToolingLogCollectingWriter, + createAnyInstanceSerializer, +} from '@kbn/dev-utils'; + +import { Config, Platform } from '../../lib'; +import { DownloadNodeBuilds } from './download_node_builds_task'; + +// import * as NodeShasumsNS from '../node_shasums'; +// import * as NodeDownloadInfoNS from '../node_download_info'; +// import * as DownloadNS from '../../../lib/download'; +// import { DownloadNodeBuilds } from '../download_node_builds_task'; +jest.mock('./node_shasums'); +jest.mock('./node_download_info'); +jest.mock('../../lib/download'); + +expect.addSnapshotSerializer(createAnyInstanceSerializer(ToolingLog)); + +const { getNodeDownloadInfo } = jest.requireMock('./node_download_info'); +const { getNodeShasums } = jest.requireMock('./node_shasums'); +const { download } = jest.requireMock('../../lib/download'); + +const log = new ToolingLog(); +const testWriter = new ToolingLogCollectingWriter(); +log.setWriters([testWriter]); + +beforeEach(() => { + testWriter.messages.length = 0; + jest.clearAllMocks(); +}); + +async function setup({ failOnUrl }: { failOnUrl?: string } = {}) { + const config = await Config.create({ + isRelease: true, + targetAllPlatforms: true, + }); + + getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => { + return { + url: `${platform.getName()}:url`, + downloadPath: `${platform.getName()}:downloadPath`, + downloadName: `${platform.getName()}:downloadName`, + }; + }); + + getNodeShasums.mockReturnValue({ + 'linux:downloadName': 'linux:sha256', + 'darwin:downloadName': 'darwin:sha256', + 'win32:downloadName': 'win32:sha256', + }); + + download.mockImplementation(({ url }: any) => { + if (url === failOnUrl) { + throw new Error('Download failed for reasons'); + } + }); + + return { config }; +} + +it('downloads node builds for each platform', async () => { + const { config } = await setup(); + + await DownloadNodeBuilds.run(config, log, []); + + expect(download.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + Object { + "destination": "linux:downloadPath", + "log": , + "retries": 3, + "sha256": "linux:sha256", + "url": "linux:url", + }, + ], + Array [ + Object { + "destination": "linux:downloadPath", + "log": , + "retries": 3, + "sha256": "linux:sha256", + "url": "linux:url", + }, + ], + Array [ + Object { + "destination": "darwin:downloadPath", + "log": , + "retries": 3, + "sha256": "darwin:sha256", + "url": "darwin:url", + }, + ], + Array [ + Object { + "destination": "win32:downloadPath", + "log": , + "retries": 3, + "sha256": "win32:sha256", + "url": "win32:url", + }, + ], + ] + `); + expect(testWriter.messages).toMatchInlineSnapshot(`Array []`); +}); + +it('rejects if any download fails', async () => { + const { config } = await setup({ failOnUrl: 'linux:url' }); + + await expect(DownloadNodeBuilds.run(config, log, [])).rejects.toMatchInlineSnapshot( + `[Error: Download failed for reasons]` + ); + expect(testWriter.messages).toMatchInlineSnapshot(`Array []`); +}); diff --git a/src/dev/build/tasks/nodejs/download_node_builds_task.js b/src/dev/build/tasks/nodejs/download_node_builds_task.ts similarity index 93% rename from src/dev/build/tasks/nodejs/download_node_builds_task.js rename to src/dev/build/tasks/nodejs/download_node_builds_task.ts index c0907e6c42a97..ad42ea11436f5 100644 --- a/src/dev/build/tasks/nodejs/download_node_builds_task.js +++ b/src/dev/build/tasks/nodejs/download_node_builds_task.ts @@ -17,11 +17,11 @@ * under the License. 
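The rewritten jest test above registers `createAnyInstanceSerializer(ToolingLog)` so that logger instances print as a short, stable token inside the inline snapshots instead of dumping the whole object. A minimal sketch of what such a serializer could look like (the real implementation is in `@kbn/dev-utils` and may differ):

```ts
// Minimal sketch, not the @kbn/dev-utils implementation.
export function createAnyInstanceSerializer(Class: new (...args: any[]) => any, name?: string) {
  return {
    // Match any instance of the given class...
    test: (value: any) => value instanceof Class,
    // ...and print it as a stable token such as "<ToolingLog>".
    serialize: () => `<${name ?? Class.name}>`,
  };
}
```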
*/ -import { download } from '../../lib'; +import { download, GlobalTask } from '../../lib'; import { getNodeShasums } from './node_shasums'; import { getNodeDownloadInfo } from './node_download_info'; -export const DownloadNodeBuildsTask = { +export const DownloadNodeBuilds: GlobalTask = { global: true, description: 'Downloading node.js builds for all platforms', async run(config, log) { diff --git a/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts new file mode 100644 index 0000000000000..94c421f7c9a62 --- /dev/null +++ b/src/dev/build/tasks/nodejs/extract_node_builds_task.test.ts @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + ToolingLog, + ToolingLogCollectingWriter, + createAbsolutePathSerializer, +} from '@kbn/dev-utils'; + +import { Config } from '../../lib'; +import { ExtractNodeBuilds } from './extract_node_builds_task'; + +jest.mock('../../lib/fs'); + +const Fs = jest.requireMock('../../lib/fs'); + +const log = new ToolingLog(); +const testWriter = new ToolingLogCollectingWriter(); +log.setWriters([testWriter]); + +expect.addSnapshotSerializer(createAbsolutePathSerializer()); + +async function setup() { + const config = await Config.create({ + isRelease: true, + targetAllPlatforms: true, + }); + + return { config }; +} + +beforeEach(() => { + testWriter.messages.length = 0; + jest.clearAllMocks(); +}); + +it('runs expected fs operations', async () => { + const { config } = await setup(); + + await ExtractNodeBuilds.run(config, log, []); + + const usedMethods = Object.fromEntries( + Object.entries(Fs) + .filter((entry): entry is [string, jest.Mock] => { + const [, mock] = entry; + + if (typeof mock !== 'function') { + return false; + } + + return (mock as jest.Mock).mock.calls.length > 0; + }) + .map(([name, mock]) => [name, mock.mock.calls]) + ); + + expect(usedMethods).toMatchInlineSnapshot(` + Object { + "copy": Array [ + Array [ + /.node_binaries/10.21.0/node.exe, + /.node_binaries/10.21.0/win32-x64/node.exe, + Object { + "clone": true, + }, + ], + ], + "untar": Array [ + Array [ + /.node_binaries/10.21.0/node-v10.21.0-linux-x64.tar.gz, + /.node_binaries/10.21.0/linux-x64, + Object { + "strip": 1, + }, + ], + Array [ + /.node_binaries/10.21.0/node-v10.21.0-linux-arm64.tar.gz, + /.node_binaries/10.21.0/linux-arm64, + Object { + "strip": 1, + }, + ], + Array [ + /.node_binaries/10.21.0/node-v10.21.0-darwin-x64.tar.gz, + /.node_binaries/10.21.0/darwin-x64, + Object { + "strip": 1, + }, + ], + ], + } + `); +}); diff --git a/src/dev/build/tasks/nodejs/extract_node_builds_task.js b/src/dev/build/tasks/nodejs/extract_node_builds_task.ts similarity index 56% rename from src/dev/build/tasks/nodejs/extract_node_builds_task.js rename 
to src/dev/build/tasks/nodejs/extract_node_builds_task.ts index caf0a389b4cc0..aaa3312c8ba3f 100644 --- a/src/dev/build/tasks/nodejs/extract_node_builds_task.js +++ b/src/dev/build/tasks/nodejs/extract_node_builds_task.ts @@ -17,39 +17,27 @@ * under the License. */ -import { dirname, resolve } from 'path'; -import fs from 'fs'; -import { promisify } from 'util'; +import Path from 'path'; -import { untar, mkdirp } from '../../lib'; +import { untar, GlobalTask, copy } from '../../lib'; import { getNodeDownloadInfo } from './node_download_info'; -const statAsync = promisify(fs.stat); -const copyFileAsync = promisify(fs.copyFile); - -export const ExtractNodeBuildsTask = { +export const ExtractNodeBuilds: GlobalTask = { global: true, description: 'Extracting node.js builds for all platforms', async run(config) { await Promise.all( config.getNodePlatforms().map(async (platform) => { const { downloadPath, extractDir } = getNodeDownloadInfo(config, platform); - // windows executable is not extractable, it's just an .exe file if (platform.isWindows()) { - const destination = resolve(extractDir, 'node.exe'); - return this.copyWindows(downloadPath, destination); + // windows executable is not extractable, it's just an .exe file + await copy(downloadPath, Path.resolve(extractDir, 'node.exe'), { + clone: true, + }); + } else { + await untar(downloadPath, extractDir, { strip: 1 }); } - - // all other downloads are tarballs - return untar(downloadPath, extractDir, { strip: 1 }); }) ); }, - async copyWindows(source, destination) { - // ensure source exists before creating destination directory - await statAsync(source); - await mkdirp(dirname(destination)); - // for performance reasons, do a copy-on-write by using the fs.constants.COPYFILE_FICLONE flag - return await copyFileAsync(source, destination, fs.constants.COPYFILE_FICLONE); - }, }; diff --git a/src/dev/build/tasks/nodejs/index.js b/src/dev/build/tasks/nodejs/index.js deleted file mode 100644 index e52dba73e4a96..0000000000000 --- a/src/dev/build/tasks/nodejs/index.js +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { getNodeDownloadInfo } from './node_download_info'; - -export { DownloadNodeBuildsTask } from './download_node_builds_task'; -export { ExtractNodeBuildsTask } from './extract_node_builds_task'; -export { VerifyExistingNodeBuildsTask } from './verify_existing_node_builds_task'; -export { CleanNodeBuildsTask } from './clean_node_builds_task'; diff --git a/src/dev/build/tasks/nodejs/index.ts b/src/dev/build/tasks/nodejs/index.ts new file mode 100644 index 0000000000000..8dd65418fb445 --- /dev/null +++ b/src/dev/build/tasks/nodejs/index.ts @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export * from './node_download_info'; +export * from './download_node_builds_task'; +export * from './extract_node_builds_task'; +export * from './verify_existing_node_builds_task'; +export * from './clean_node_builds_task'; diff --git a/src/dev/build/tasks/nodejs/node_download_info.js b/src/dev/build/tasks/nodejs/node_download_info.ts similarity index 92% rename from src/dev/build/tasks/nodejs/node_download_info.js rename to src/dev/build/tasks/nodejs/node_download_info.ts index 33ffd042d85a3..b2c62d6667fd4 100644 --- a/src/dev/build/tasks/nodejs/node_download_info.js +++ b/src/dev/build/tasks/nodejs/node_download_info.ts @@ -19,7 +19,9 @@ import { basename } from 'path'; -export function getNodeDownloadInfo(config, platform) { +import { Config, Platform } from '../../lib'; + +export function getNodeDownloadInfo(config: Config, platform: Platform) { const version = config.getNodeVersion(); const arch = platform.getNodeArch(); diff --git a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts new file mode 100644 index 0000000000000..f24b7ffc59c14 --- /dev/null +++ b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.test.ts @@ -0,0 +1,225 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
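`getNodeDownloadInfo` is now typed against `Config` and `Platform`. Its return shape is not spelled out in this hunk; judging from the tasks and mocks in this diff, it looks roughly like the interface below (a sketch, not the actual code):

```ts
// Sketch of the return shape implied by the call sites in this PR.
interface NodeDownloadInfo {
  url: string; // where the node build is fetched from
  downloadName: string; // key used to look up the expected shasum
  downloadPath: string; // on-disk path of the downloaded archive
  extractDir: string; // per-platform directory the build is extracted into
}
```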
+ */ + +import { + ToolingLog, + ToolingLogCollectingWriter, + createAnyInstanceSerializer, +} from '@kbn/dev-utils'; + +import { Config, Platform } from '../../lib'; +import { VerifyExistingNodeBuilds } from './verify_existing_node_builds_task'; + +jest.mock('./node_shasums'); +jest.mock('./node_download_info'); +jest.mock('../../lib/fs'); + +const { getNodeShasums } = jest.requireMock('./node_shasums'); +const { getNodeDownloadInfo } = jest.requireMock('./node_download_info'); +const { getFileHash } = jest.requireMock('../../lib/fs'); + +const log = new ToolingLog(); +const testWriter = new ToolingLogCollectingWriter(); +log.setWriters([testWriter]); + +expect.addSnapshotSerializer(createAnyInstanceSerializer(Config)); + +async function setup(actualShaSums?: Record) { + const config = await Config.create({ + isRelease: true, + targetAllPlatforms: true, + }); + + getNodeShasums.mockReturnValue( + Object.fromEntries( + config.getTargetPlatforms().map((platform) => { + return [`${platform.getName()}:${platform.getNodeArch()}:downloadName`, 'valid shasum']; + }) + ) + ); + + getNodeDownloadInfo.mockImplementation((_: Config, platform: Platform) => { + return { + downloadPath: `${platform.getName()}:${platform.getNodeArch()}:downloadPath`, + downloadName: `${platform.getName()}:${platform.getNodeArch()}:downloadName`, + }; + }); + + getFileHash.mockImplementation((downloadPath: string) => { + if (actualShaSums?.[downloadPath]) { + return actualShaSums[downloadPath]; + } + + return 'valid shasum'; + }); + + return { config }; +} + +beforeEach(() => { + testWriter.messages.length = 0; + jest.clearAllMocks(); +}); + +it('checks shasums for each downloaded node build', async () => { + const { config } = await setup(); + + await VerifyExistingNodeBuilds.run(config, log, []); + + expect(getNodeShasums).toMatchInlineSnapshot(` + [MockFunction] { + "calls": Array [ + Array [ + "10.21.0", + ], + ], + "results": Array [ + Object { + "type": "return", + "value": Object { + "darwin:darwin-x64:downloadName": "valid shasum", + "linux:linux-arm64:downloadName": "valid shasum", + "linux:linux-x64:downloadName": "valid shasum", + "win32:win32-x64:downloadName": "valid shasum", + }, + }, + ], + } + `); + expect(getNodeDownloadInfo).toMatchInlineSnapshot(` + [MockFunction] { + "calls": Array [ + Array [ + , + Platform { + "architecture": "x64", + "buildName": "linux-x86_64", + "name": "linux", + }, + ], + Array [ + , + Platform { + "architecture": "arm64", + "buildName": "linux-aarch64", + "name": "linux", + }, + ], + Array [ + , + Platform { + "architecture": "x64", + "buildName": "darwin-x86_64", + "name": "darwin", + }, + ], + Array [ + , + Platform { + "architecture": "x64", + "buildName": "windows-x86_64", + "name": "win32", + }, + ], + ], + "results": Array [ + Object { + "type": "return", + "value": Object { + "downloadName": "linux:linux-x64:downloadName", + "downloadPath": "linux:linux-x64:downloadPath", + }, + }, + Object { + "type": "return", + "value": Object { + "downloadName": "linux:linux-arm64:downloadName", + "downloadPath": "linux:linux-arm64:downloadPath", + }, + }, + Object { + "type": "return", + "value": Object { + "downloadName": "darwin:darwin-x64:downloadName", + "downloadPath": "darwin:darwin-x64:downloadPath", + }, + }, + Object { + "type": "return", + "value": Object { + "downloadName": "win32:win32-x64:downloadName", + "downloadPath": "win32:win32-x64:downloadPath", + }, + }, + ], + } + `); + expect(getFileHash).toMatchInlineSnapshot(` + [MockFunction] { + "calls": Array [ + Array [ 
+ "linux:linux-x64:downloadPath", + "sha256", + ], + Array [ + "linux:linux-arm64:downloadPath", + "sha256", + ], + Array [ + "darwin:darwin-x64:downloadPath", + "sha256", + ], + Array [ + "win32:win32-x64:downloadPath", + "sha256", + ], + ], + "results": Array [ + Object { + "type": "return", + "value": "valid shasum", + }, + Object { + "type": "return", + "value": "valid shasum", + }, + Object { + "type": "return", + "value": "valid shasum", + }, + Object { + "type": "return", + "value": "valid shasum", + }, + ], + } + `); +}); + +it('rejects if any download has an incorrect sha256', async () => { + const { config } = await setup({ + 'linux:linux-arm64:downloadPath': 'invalid shasum', + }); + + await expect( + VerifyExistingNodeBuilds.run(config, log, []) + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"Download at linux:linux-arm64:downloadPath does not match expected checksum invalid shasum"` + ); +}); diff --git a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts similarity index 93% rename from src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js rename to src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts index b320471fda33f..9ce0778d2d1f0 100644 --- a/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.js +++ b/src/dev/build/tasks/nodejs/verify_existing_node_builds_task.ts @@ -17,11 +17,11 @@ * under the License. */ -import { getFileHash } from '../../lib'; +import { getFileHash, GlobalTask } from '../../lib'; import { getNodeDownloadInfo } from './node_download_info'; import { getNodeShasums } from './node_shasums'; -export const VerifyExistingNodeBuildsTask = { +export const VerifyExistingNodeBuilds: GlobalTask = { global: true, description: 'Verifying previously downloaded node.js build for all platforms', async run(config, log) { diff --git a/src/dev/build/tasks/notice_file_task.js b/src/dev/build/tasks/notice_file_task.ts similarity index 95% rename from src/dev/build/tasks/notice_file_task.js rename to src/dev/build/tasks/notice_file_task.ts index 59369c7cb5a3b..6edb76d506bc0 100644 --- a/src/dev/build/tasks/notice_file_task.js +++ b/src/dev/build/tasks/notice_file_task.ts @@ -20,11 +20,11 @@ import { getInstalledPackages } from '../../npm'; import { LICENSE_OVERRIDES } from '../../license_checker'; -import { write } from '../lib'; +import { write, Task } from '../lib'; import { getNodeDownloadInfo } from './nodejs'; import { generateNoticeFromSource, generateBuildNoticeText } from '../../notice'; -export const CreateNoticeFileTask = { +export const CreateNoticeFile: Task = { description: 'Generating NOTICE.txt file', async run(config, log, build) { @@ -40,7 +40,7 @@ export const CreateNoticeFileTask = { log.info('Discovering installed packages'); const packages = await getInstalledPackages({ directory: build.resolvePath(), - dev: false, + includeDev: false, licenseOverrides: LICENSE_OVERRIDES, }); diff --git a/src/dev/build/tasks/optimize_task.js b/src/dev/build/tasks/optimize_task.ts similarity index 95% rename from src/dev/build/tasks/optimize_task.js rename to src/dev/build/tasks/optimize_task.ts index 16a7537b8ac9e..98979f376eacd 100644 --- a/src/dev/build/tasks/optimize_task.js +++ b/src/dev/build/tasks/optimize_task.ts @@ -17,10 +17,10 @@ * under the License. 
*/ -import { deleteAll, copyAll, exec } from '../lib'; +import { deleteAll, copyAll, exec, Task } from '../lib'; import { getNodeDownloadInfo } from './nodejs'; -export const OptimizeBuildTask = { +export const OptimizeBuild: Task = { description: 'Running optimizer', async run(config, log, build) { diff --git a/src/dev/build/tasks/os_packages/create_os_package_tasks.js b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts similarity index 89% rename from src/dev/build/tasks/os_packages/create_os_package_tasks.js rename to src/dev/build/tasks/os_packages/create_os_package_tasks.ts index 6a00e681ab0ec..4580b95423d3d 100644 --- a/src/dev/build/tasks/os_packages/create_os_package_tasks.js +++ b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts @@ -17,10 +17,11 @@ * under the License. */ +import { Task } from '../../lib'; import { runFpm } from './run_fpm'; import { runDockerGenerator, runDockerGeneratorForUBI } from './docker_generator'; -export const CreateDebPackageTask = { +export const CreateDebPackage: Task = { description: 'Creating deb package', async run(config, log, build) { @@ -33,7 +34,7 @@ export const CreateDebPackageTask = { }, }; -export const CreateRpmPackageTask = { +export const CreateRpmPackage: Task = { description: 'Creating rpm package', async run(config, log, build) { @@ -41,7 +42,7 @@ export const CreateRpmPackageTask = { }, }; -export const CreateDockerPackageTask = { +export const CreateDockerPackage: Task = { description: 'Creating docker package', async run(config, log, build) { @@ -50,7 +51,7 @@ export const CreateDockerPackageTask = { }, }; -export const CreateDockerUbiPackageTask = { +export const CreateDockerUbiPackage: Task = { description: 'Creating docker ubi package', async run(config, log, build) { diff --git a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js index bbcb6dfeeb109..3f34a84057668 100644 --- a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js +++ b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.js @@ -18,7 +18,7 @@ */ import { resolve } from 'path'; -import { compress, copyAll, mkdirp, write } from '../../../lib'; +import { compressTar, copyAll, mkdirp, write } from '../../../lib'; import { dockerfileTemplate } from './templates'; export async function bundleDockerFiles(config, log, build, scope) { @@ -50,8 +50,7 @@ export async function bundleDockerFiles(config, log, build, scope) { // Compress dockerfiles dir created inside // docker build dir as output it as a target // on targets folder - await compress( - 'tar', + await compressTar( { archiverOptions: { gzip: true, diff --git a/src/dev/build/tasks/os_packages/docker_generator/index.ts b/src/dev/build/tasks/os_packages/docker_generator/index.ts new file mode 100644 index 0000000000000..78d2b197dc7b2 --- /dev/null +++ b/src/dev/build/tasks/os_packages/docker_generator/index.ts @@ -0,0 +1,21 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// @ts-expect-error not ts yet +export { runDockerGenerator, runDockerGeneratorForUBI } from './run'; diff --git a/src/dev/build/tasks/bin/index.js b/src/dev/build/tasks/os_packages/index.ts similarity index 92% rename from src/dev/build/tasks/bin/index.js rename to src/dev/build/tasks/os_packages/index.ts index e970ac5ec044b..439fde71d255f 100644 --- a/src/dev/build/tasks/bin/index.js +++ b/src/dev/build/tasks/os_packages/index.ts @@ -17,4 +17,4 @@ * under the License. */ -export { CopyBinScriptsTask } from './copy_bin_scripts_task'; +export * from './create_os_package_tasks'; diff --git a/src/dev/build/tasks/os_packages/run_fpm.js b/src/dev/build/tasks/os_packages/run_fpm.ts similarity index 91% rename from src/dev/build/tasks/os_packages/run_fpm.js rename to src/dev/build/tasks/os_packages/run_fpm.ts index eb77da0e70176..b5169ec3d43b6 100644 --- a/src/dev/build/tasks/os_packages/run_fpm.js +++ b/src/dev/build/tasks/os_packages/run_fpm.ts @@ -19,15 +19,23 @@ import { resolve } from 'path'; -import { exec } from '../../lib'; +import { ToolingLog } from '@kbn/dev-utils'; -export async function runFpm(config, log, build, type, pkgSpecificFlags) { +import { exec, Config, Build } from '../../lib'; + +export async function runFpm( + config: Config, + log: ToolingLog, + build: Build, + type: 'rpm' | 'deb', + pkgSpecificFlags: string[] +) { const linux = config.getPlatform('linux', 'x64'); const version = config.getBuildVersion(); - const resolveWithTrailingSlash = (...paths) => `${resolve(...paths)}/`; + const resolveWithTrailingSlash = (...paths: string[]) => `${resolve(...paths)}/`; - const fromBuild = (...paths) => build.resolvePathForPlatform(linux, ...paths); + const fromBuild = (...paths: string[]) => build.resolvePathForPlatform(linux, ...paths); const pickLicense = () => { if (build.isOss()) { diff --git a/src/dev/build/tasks/patch_native_modules_task.js b/src/dev/build/tasks/patch_native_modules_task.ts similarity index 82% rename from src/dev/build/tasks/patch_native_modules_task.js rename to src/dev/build/tasks/patch_native_modules_task.ts index c30d1fd774b55..b56d01b616462 100644 --- a/src/dev/build/tasks/patch_native_modules_task.js +++ b/src/dev/build/tasks/patch_native_modules_task.ts @@ -16,14 +16,30 @@ * specific language governing permissions and limitations * under the License. 
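`runFpm` above now narrows `type` to `'rpm' | 'deb'` and types its small path helpers. `resolveWithTrailingSlash` forces a trailing slash onto resolved paths, presumably so the mapped source directories contribute their contents rather than the directory entry itself; a standalone illustration:

```ts
import { resolve } from 'path';

// Same helper as in run_fpm.ts: resolve the path, then force a trailing slash.
const resolveWithTrailingSlash = (...paths: string[]) => `${resolve(...paths)}/`;

console.log(resolveWithTrailingSlash('/tmp/kibana-build', 'config'));
// -> "/tmp/kibana-build/config/"
```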
*/ -import fs from 'fs'; + import path from 'path'; -import util from 'util'; -import { deleteAll, download, gunzip, untar } from '../lib'; + +import { ToolingLog } from '@kbn/dev-utils'; + +import { deleteAll, download, gunzip, untar, Task, Config, Build, Platform, read } from '../lib'; const DOWNLOAD_DIRECTORY = '.native_modules'; -const packages = [ +interface Package { + name: string; + version: string; + destinationPath: string; + extractMethod: string; + archives: Record< + string, + { + url: string; + sha256: string; + } + >; +} + +const packages: Package[] = [ { name: 're2', version: '1.15.4', @@ -46,16 +62,22 @@ const packages = [ }, ]; -async function getInstalledVersion(config, packageName) { +async function getInstalledVersion(config: Config, packageName: string) { const packageJSONPath = config.resolveFromRepo( path.join('node_modules', packageName, 'package.json') ); - const buffer = await util.promisify(fs.readFile)(packageJSONPath); - const packageJSON = JSON.parse(buffer); + const json = await read(packageJSONPath); + const packageJSON = JSON.parse(json); return packageJSON.version; } -async function patchModule(config, log, build, platform, pkg) { +async function patchModule( + config: Config, + log: ToolingLog, + build: Build, + platform: Platform, + pkg: Package +) { const installedVersion = await getInstalledVersion(config, pkg.name); if (installedVersion !== pkg.version) { throw new Error( @@ -89,7 +111,7 @@ async function patchModule(config, log, build, platform, pkg) { } } -export const PatchNativeModulesTask = { +export const PatchNativeModules: Task = { description: 'Patching platform-specific native modules', async run(config, log, build) { for (const pkg of packages) { diff --git a/src/dev/build/tasks/path_length_task.js b/src/dev/build/tasks/path_length_task.ts similarity index 95% rename from src/dev/build/tasks/path_length_task.js rename to src/dev/build/tasks/path_length_task.ts index 29ab9ce5a2499..d639217adc53b 100644 --- a/src/dev/build/tasks/path_length_task.js +++ b/src/dev/build/tasks/path_length_task.ts @@ -21,9 +21,9 @@ import { relative } from 'path'; import { tap, filter, map, toArray } from 'rxjs/operators'; -import { scan$ } from '../lib/scan'; +import { scan$, Task } from '../lib'; -export const PathLengthTask = { +export const PathLength: Task = { description: 'Checking Windows for paths > 200 characters', async run(config, log, build) { diff --git a/src/dev/build/tasks/transpile_babel_task.js b/src/dev/build/tasks/transpile_babel_task.ts similarity index 80% rename from src/dev/build/tasks/transpile_babel_task.js rename to src/dev/build/tasks/transpile_babel_task.ts index f476ead9183fe..a1e994587ce92 100644 --- a/src/dev/build/tasks/transpile_babel_task.js +++ b/src/dev/build/tasks/transpile_babel_task.ts @@ -17,15 +17,21 @@ * under the License. 
*/ +import { pipeline } from 'stream'; +import { promisify } from 'util'; + +// @ts-expect-error @types/gulp-babel is outdated and doesn't work for gulp-babel v8 import gulpBabel from 'gulp-babel'; import vfs from 'vinyl-fs'; -import { createPromiseFromStreams } from '../../../legacy/utils'; +import { Task, Build } from '../lib'; + +const asyncPipeline = promisify(pipeline); -const transpileWithBabel = async (srcGlobs, build, presets) => { +const transpileWithBabel = async (srcGlobs: string[], build: Build, presets: string[]) => { const buildRoot = build.resolvePath(); - await createPromiseFromStreams([ + await asyncPipeline( vfs.src( srcGlobs.concat([ '!**/*.d.ts', @@ -44,11 +50,11 @@ const transpileWithBabel = async (srcGlobs, build, presets) => { presets, }), - vfs.dest(buildRoot), - ]); + vfs.dest(buildRoot) + ); }; -export const TranspileBabelTask = { +export const TranspileBabel: Task = { description: 'Transpiling sources with babel', async run(config, log, build) { diff --git a/src/dev/build/tasks/transpile_scss_task.js b/src/dev/build/tasks/transpile_scss_task.ts similarity index 89% rename from src/dev/build/tasks/transpile_scss_task.js rename to src/dev/build/tasks/transpile_scss_task.ts index d1c76d97c8853..e1b0bd0171c92 100644 --- a/src/dev/build/tasks/transpile_scss_task.js +++ b/src/dev/build/tasks/transpile_scss_task.ts @@ -17,9 +17,12 @@ * under the License. */ +import { Task } from '../lib'; + +// @ts-expect-error buildSass isn't TS yet import { buildSass } from '../../sass'; -export const TranspileScssTask = { +export const TranspileScss: Task = { description: 'Transpiling SCSS to CSS', async run(config, log, build) { await buildSass({ diff --git a/src/dev/build/tasks/uuid_verification_task.js b/src/dev/build/tasks/uuid_verification_task.ts similarity index 94% rename from src/dev/build/tasks/uuid_verification_task.js rename to src/dev/build/tasks/uuid_verification_task.ts index 32c9e73dba988..b65096690b681 100644 --- a/src/dev/build/tasks/uuid_verification_task.js +++ b/src/dev/build/tasks/uuid_verification_task.ts @@ -17,9 +17,9 @@ * under the License. */ -import { read } from '../lib'; +import { read, Task } from '../lib'; -export const UuidVerificationTask = { +export const UuidVerification: Task = { description: 'Verify that no UUID file is baked into the build', async run(config, log, build) { diff --git a/src/dev/build/tasks/verify_env_task.js b/src/dev/build/tasks/verify_env_task.ts similarity index 93% rename from src/dev/build/tasks/verify_env_task.js rename to src/dev/build/tasks/verify_env_task.ts index eb679411d7e38..975a620c1c540 100644 --- a/src/dev/build/tasks/verify_env_task.js +++ b/src/dev/build/tasks/verify_env_task.ts @@ -17,7 +17,9 @@ * under the License. 
*/ -export const VerifyEnvTask = { +import { GlobalTask } from '../lib'; + +export const VerifyEnv: GlobalTask = { global: true, description: 'Verifying environment meets requirements', diff --git a/src/dev/build/tasks/write_sha_sums_task.js b/src/dev/build/tasks/write_sha_sums_task.ts similarity index 92% rename from src/dev/build/tasks/write_sha_sums_task.js rename to src/dev/build/tasks/write_sha_sums_task.ts index c44924bb9ce09..abf938cd150ab 100644 --- a/src/dev/build/tasks/write_sha_sums_task.js +++ b/src/dev/build/tasks/write_sha_sums_task.ts @@ -19,9 +19,9 @@ import globby from 'globby'; -import { getFileHash, write } from '../lib'; +import { getFileHash, write, GlobalTask } from '../lib'; -export const WriteShaSumsTask = { +export const WriteShaSums: GlobalTask = { global: true, description: 'Writing sha1sums of archives and packages in target directory', diff --git a/src/legacy/core_plugins/apm_oss/index.js b/src/legacy/core_plugins/apm_oss/index.js deleted file mode 100644 index b7ab6797c0de9..0000000000000 --- a/src/legacy/core_plugins/apm_oss/index.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; - -export default function apmOss(kibana) { - return new kibana.Plugin({ - id: 'apm_oss', - - config(Joi) { - return Joi.object({ - // enable plugin - enabled: Joi.boolean().default(true), - - // Kibana Index pattern - indexPattern: Joi.string().default('apm-*'), - - // ES Indices - sourcemapIndices: Joi.string().default('apm-*'), - errorIndices: Joi.string().default('apm-*'), - transactionIndices: Joi.string().default('apm-*'), - spanIndices: Joi.string().default('apm-*'), - metricsIndices: Joi.string().default('apm-*'), - onboardingIndices: Joi.string().default('apm-*'), - }).default(); - }, - - init(server) { - server.expose( - 'indexPatterns', - _.uniq( - [ - 'sourcemapIndices', - 'errorIndices', - 'transactionIndices', - 'spanIndices', - 'metricsIndices', - 'onboardingIndices', - ].map((type) => server.config().get(`apm_oss.${type}`)) - ) - ); - }, - }); -} diff --git a/src/legacy/core_plugins/apm_oss/package.json b/src/legacy/core_plugins/apm_oss/package.json deleted file mode 100644 index 4ca161f293e79..0000000000000 --- a/src/legacy/core_plugins/apm_oss/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "apm_oss", - "version": "kibana" -} diff --git a/src/legacy/core_plugins/kibana/public/__tests__/discover/legacy.ts b/src/legacy/core_plugins/kibana/public/__tests__/discover/legacy.ts deleted file mode 100644 index ecda2a8c15395..0000000000000 --- a/src/legacy/core_plugins/kibana/public/__tests__/discover/legacy.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { npSetup, npStart } from 'ui/new_platform'; -import { plugin } from '../../../../../../plugins/discover/public'; -import { coreMock } from '../../../../../../core/public/mocks'; -const context = coreMock.createPluginInitializerContext(); - -export const pluginInstance = plugin(context); -export const setup = pluginInstance.setup(npSetup.core, npSetup.plugins); -export const start = pluginInstance.start(npStart.core, npStart.plugins); diff --git a/src/legacy/core_plugins/kibana/public/__tests__/discover/row_headers.js b/src/legacy/core_plugins/kibana/public/__tests__/discover/row_headers.js deleted file mode 100644 index 29c301bf065c4..0000000000000 --- a/src/legacy/core_plugins/kibana/public/__tests__/discover/row_headers.js +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import angular from 'angular'; -import _ from 'lodash'; -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import { getFakeRow, getFakeRowVals } from 'fixtures/fake_row'; -import $ from 'jquery'; -import { pluginInstance } from './legacy'; -import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; -import { setScopedHistory } from '../../../../../../plugins/discover/public/kibana_services'; -import { createBrowserHistory } from 'history'; - -describe('Doc Table', function () { - let $parentScope; - let $scope; - - // Stub out a minimal mapping of 4 fields - let mapping; - - let fakeRowVals; - let stubFieldFormatConverter; - beforeEach(() => pluginInstance.initializeServices()); - beforeEach(() => pluginInstance.initializeInnerAngular()); - before(() => setScopedHistory(createBrowserHistory())); - beforeEach(ngMock.module('app/discover')); - beforeEach( - ngMock.inject(function ($rootScope, Private) { - $parentScope = $rootScope; - $parentScope.indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); - mapping = $parentScope.indexPattern.fields; - - // Stub `getConverterFor` for a field in the indexPattern to return mock data. 
- // Returns `val` if provided, otherwise generates fake data for the field. - fakeRowVals = getFakeRowVals('formatted', 0, mapping); - stubFieldFormatConverter = function ($root, field, val) { - const convertFn = (value, type, options) => { - if (val) { - return val; - } - const fieldName = _.get(options, 'field.name', null); - - return fakeRowVals[fieldName] || ''; - }; - - $root.indexPattern.fields.getByName(field).format.convert = convertFn; - $root.indexPattern.fields.getByName(field).format.getConverterFor = () => convertFn; - }; - }) - ); - - // Sets up the directive, take an element, and a list of properties to attach to the parent scope. - const init = function ($elem, props) { - ngMock.inject(function ($compile) { - _.assign($parentScope, props); - $compile($elem)($parentScope); - $elem.scope().$digest(); - $scope = $elem.isolateScope(); - }); - }; - - const destroy = function () { - $scope.$destroy(); - $parentScope.$destroy(); - }; - - // For testing column removing/adding for the header and the rows - const columnTests = function (elemType, parentElem) { - it('should create a time column if the timefield is defined', function () { - const childElems = parentElem.find(elemType); - expect(childElems.length).to.be(1); - }); - - it('should be able to add and remove columns', function () { - let childElems; - - stubFieldFormatConverter($parentScope, 'bytes'); - stubFieldFormatConverter($parentScope, 'request_body'); - - // Should include a column for toggling and the time column by default - $parentScope.columns = ['bytes']; - parentElem.scope().$digest(); - childElems = parentElem.find(elemType); - expect(childElems.length).to.be(2); - expect($(childElems[1]).text()).to.contain('bytes'); - - $parentScope.columns = ['bytes', 'request_body']; - parentElem.scope().$digest(); - childElems = parentElem.find(elemType); - expect(childElems.length).to.be(3); - expect($(childElems[2]).text()).to.contain('request_body'); - - $parentScope.columns = ['request_body']; - parentElem.scope().$digest(); - childElems = parentElem.find(elemType); - expect(childElems.length).to.be(2); - expect($(childElems[1]).text()).to.contain('request_body'); - }); - - it('should create only the toggle column if there is no timeField', function () { - delete parentElem.scope().indexPattern.timeFieldName; - parentElem.scope().$digest(); - - const childElems = parentElem.find(elemType); - expect(childElems.length).to.be(0); - }); - }; - - describe('kbnTableRow', function () { - const $elem = angular.element( - '' - ); - let row; - - beforeEach(function () { - row = getFakeRow(0, mapping); - - init($elem, { - row, - columns: [], - sorting: [], - filter: sinon.spy(), - maxLength: 50, - }); - }); - afterEach(function () { - destroy(); - }); - - describe('adding and removing columns', function () { - columnTests('[data-test-subj~="docTableField"]', $elem); - }); - - describe('details row', function () { - it('should be an empty tr by default', function () { - expect($elem.next().is('tr')).to.be(true); - expect($elem.next().text()).to.be(''); - }); - - it('should expand the detail row when the toggle arrow is clicked', function () { - $elem.children(':first-child').click(); - $scope.$digest(); - expect($elem.next().text()).to.not.be(''); - }); - - describe('expanded', function () { - let $details; - beforeEach(function () { - // Open the row - $scope.toggleRow(); - $scope.$digest(); - $details = $elem.next(); - }); - afterEach(function () { - // Close the row - $scope.toggleRow(); - $scope.$digest(); - }); - - it('should 
be a tr with something in it', function () { - expect($details.is('tr')).to.be(true); - expect($details.text()).to.not.be.empty(); - }); - }); - }); - }); - - describe('kbnTableRow meta', function () { - const $elem = angular.element( - '' - ); - let row; - - beforeEach(function () { - row = getFakeRow(0, mapping); - - init($elem, { - row: row, - columns: [], - sorting: [], - filtering: sinon.spy(), - maxLength: 50, - }); - - // Open the row - $scope.toggleRow(); - $scope.$digest(); - $elem.next(); - }); - - afterEach(function () { - destroy(); - }); - - /** this no longer works with the new plugin approach - it('should render even when the row source contains a field with the same name as a meta field', function () { - setTimeout(() => { - //this should be overridden by later changes - }, 100); - expect($details.find('tr').length).to.be(_.keys($parentScope.indexPattern.flattenHit($scope.row)).length); - }); */ - }); - - describe('row diffing', function () { - let $row; - let $scope; - let $root; - let $before; - - beforeEach( - ngMock.inject(function ($rootScope, $compile, Private) { - $root = $rootScope; - $root.row = getFakeRow(0, mapping); - $root.columns = ['_source']; - $root.sorting = []; - $root.filtering = sinon.spy(); - $root.maxLength = 50; - $root.mapping = mapping; - $root.indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); - - // Stub field format converters for every field in the indexPattern - $root.indexPattern.fields.forEach((f) => stubFieldFormatConverter($root, f.name)); - - $row = $('').attr({ - 'kbn-table-row': 'row', - columns: 'columns', - sorting: 'sorting', - filtering: 'filtering', - 'index-pattern': 'indexPattern', - }); - - $scope = $root.$new(); - $compile($row)($scope); - $root.$apply(); - - $before = $row.find('td'); - expect($before).to.have.length(3); - expect($before.eq(0).text().trim()).to.be(''); - expect($before.eq(1).text().trim()).to.match(/^time_formatted/); - }) - ); - - afterEach(function () { - $row.remove(); - }); - - it('handles a new column', function () { - $root.columns.push('bytes'); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(4); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after[2]).to.be($before[2]); - expect($after.eq(3).text().trim()).to.match(/^bytes_formatted/); - }); - - it('handles two new columns at once', function () { - $root.columns.push('bytes'); - $root.columns.push('request_body'); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(5); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after[2]).to.be($before[2]); - expect($after.eq(3).text().trim()).to.match(/^bytes_formatted/); - expect($after.eq(4).text().trim()).to.match(/^request_body_formatted/); - }); - - it('handles three new columns in odd places', function () { - $root.columns = ['@timestamp', 'bytes', '_source', 'request_body']; - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(6); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after.eq(2).text().trim()).to.match(/^@timestamp_formatted/); - expect($after.eq(3).text().trim()).to.match(/^bytes_formatted/); - expect($after[4]).to.be($before[2]); - expect($after.eq(5).text().trim()).to.match(/^request_body_formatted/); - }); - - it('handles a removed column', function () { - _.pull($root.columns, '_source'); - $root.$apply(); - - const $after = $row.find('td'); - 
expect($after).to.have.length(2); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - }); - - it('handles two removed columns', function () { - // first add a column - $root.columns.push('@timestamp'); - $root.$apply(); - - const $mid = $row.find('td'); - expect($mid).to.have.length(4); - - $root.columns.pop(); - $root.columns.pop(); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(2); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - }); - - it('handles three removed random columns', function () { - // first add two column - $root.columns.push('@timestamp', 'bytes'); - $root.$apply(); - - const $mid = $row.find('td'); - expect($mid).to.have.length(5); - - $root.columns[0] = false; // _source - $root.columns[2] = false; // bytes - $root.columns = $root.columns.filter(Boolean); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(3); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after.eq(2).text().trim()).to.match(/^@timestamp_formatted/); - }); - - it('handles two columns with the same content', function () { - stubFieldFormatConverter($root, 'request_body', fakeRowVals.bytes); - - $root.columns.length = 0; - $root.columns.push('bytes'); - $root.columns.push('request_body'); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(4); - expect($after.eq(2).text().trim()).to.match(/^bytes_formatted/); - expect($after.eq(3).text().trim()).to.match(/^bytes_formatted/); - }); - - it('handles two columns swapping position', function () { - $root.columns.push('bytes'); - $root.$apply(); - - const $mid = $row.find('td'); - expect($mid).to.have.length(4); - - $root.columns.reverse(); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(4); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after[2]).to.be($mid[3]); - expect($after[3]).to.be($mid[2]); - }); - - it('handles four columns all reversing position', function () { - $root.columns.push('bytes', 'response', '@timestamp'); - $root.$apply(); - - const $mid = $row.find('td'); - expect($mid).to.have.length(6); - - $root.columns.reverse(); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(6); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after[2]).to.be($mid[5]); - expect($after[3]).to.be($mid[4]); - expect($after[4]).to.be($mid[3]); - expect($after[5]).to.be($mid[2]); - }); - - it('handles multiple columns with the same name', function () { - $root.columns.push('bytes', 'bytes', 'bytes'); - $root.$apply(); - - const $after = $row.find('td'); - expect($after).to.have.length(6); - expect($after[0]).to.be($before[0]); - expect($after[1]).to.be($before[1]); - expect($after[2]).to.be($before[2]); - expect($after.eq(3).text().trim()).to.match(/^bytes_formatted/); - expect($after.eq(4).text().trim()).to.match(/^bytes_formatted/); - expect($after.eq(5).text().trim()).to.match(/^bytes_formatted/); - }); - }); -}); diff --git a/src/legacy/server/kbn_server.d.ts b/src/legacy/server/kbn_server.d.ts index 40996500bfbe0..9bb091383ab13 100644 --- a/src/legacy/server/kbn_server.d.ts +++ b/src/legacy/server/kbn_server.d.ts @@ -43,7 +43,6 @@ import { import { LegacyConfig, ILegacyService, ILegacyInternals } from '../../core/server/legacy'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { UiPlugins } 
from '../../core/server/plugins'; -import { ApmOssPlugin } from '../core_plugins/apm_oss'; import { CallClusterWithRequest, ElasticsearchPlugin } from '../core_plugins/elasticsearch'; import { UsageCollectionSetup } from '../../plugins/usage_collection/server'; import { UiSettingsServiceFactoryOptions } from '../../legacy/ui/ui_settings/ui_settings_service_factory'; @@ -62,7 +61,6 @@ declare module 'hapi' { elasticsearch: ElasticsearchPlugin; kibana: any; spaces: any; - apm_oss: ApmOssPlugin; // add new plugin types here } diff --git a/src/legacy/utils/__tests__/watch_stdio_for_line.js b/src/legacy/utils/__tests__/watch_stdio_for_line.js deleted file mode 100644 index 32d61658c1114..0000000000000 --- a/src/legacy/utils/__tests__/watch_stdio_for_line.js +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import execa from 'execa'; -import stripAnsi from 'strip-ansi'; -import sinon from 'sinon'; - -import { watchStdioForLine } from '../watch_stdio_for_line'; - -describe('src/legacy/utils/watch_stdio_for_line', function () { - const sandbox = sinon.sandbox.create(); - afterEach(() => sandbox.reset()); - - const onLogLine = sandbox.stub(); - const logFn = (line) => onLogLine(stripAnsi(line)); - - it('calls logFn with log lines', async () => { - const proc = execa(process.execPath, ['-e', 'console.log("hi")']); - - await watchStdioForLine(proc, logFn); - - // log output of the process - sinon.assert.calledWithExactly(onLogLine, sinon.match(/hi/)); - }); - - it('send the proc SIGKILL if it logs a line matching exitAfter regexp', async function () { - // fixture proc will exit after 10 seconds if sigint not received, but the test won't fail - // unless we see the log line `SIGINT not received`, so we let the test take up to 30 seconds - // for potentially huge delays here and there - this.timeout(30000); - - const proc = execa(process.execPath, [require.resolve('./fixtures/log_on_sigint')]); - - await watchStdioForLine(proc, logFn, /listening for SIGINT/); - - sinon.assert.calledWithExactly(onLogLine, sinon.match(/listening for SIGINT/)); - sinon.assert.neverCalledWith(onLogLine, sinon.match(/SIGINT not received/)); - }); -}); diff --git a/src/legacy/utils/index.js b/src/legacy/utils/index.js index a4c0cdf958fc2..4274fb2e4901a 100644 --- a/src/legacy/utils/index.js +++ b/src/legacy/utils/index.js @@ -21,7 +21,6 @@ export { BinderBase } from './binder'; export { BinderFor } from './binder_for'; export { deepCloneWithBuffers } from './deep_clone_with_buffers'; export { unset } from './unset'; -export { watchStdioForLine } from './watch_stdio_for_line'; export { IS_KIBANA_DISTRIBUTABLE } from './artifact_type'; export { IS_KIBANA_RELEASE } from './artifact_type'; diff --git a/src/legacy/utils/streams/index.d.ts 
b/src/legacy/utils/streams/index.d.ts index 5ef39b292c685..470b5d9fa3505 100644 --- a/src/legacy/utils/streams/index.d.ts +++ b/src/legacy/utils/streams/index.d.ts @@ -17,7 +17,7 @@ * under the License. */ -import { Readable, Transform, Writable, TransformOptions } from 'stream'; +import { Readable, Writable, Transform, TransformOptions } from 'stream'; export function concatStreamProviders( sourceProviders: Array<() => Readable>, diff --git a/src/plugins/discover/public/application/angular/directives/fixed_scroll.test.js b/src/plugins/discover/public/application/angular/directives/fixed_scroll.test.js index 16293ca621e05..65255d6c0c4a4 100644 --- a/src/plugins/discover/public/application/angular/directives/fixed_scroll.test.js +++ b/src/plugins/discover/public/application/angular/directives/fixed_scroll.test.js @@ -230,6 +230,10 @@ describe('FixedScroll directive', function () { $to = els[names.to]; }); + afterAll(() => { + delete angular.element.prototype.scrollLeft; + }); + test('transfers the scrollLeft', function () { expect(spyJQueryScrollLeft.callCount).toBe(0); expect(spyJQLiteScrollLeft.callCount).toBe(0); diff --git a/src/plugins/discover/public/application/angular/doc_table/components/row_headers.test.js b/src/plugins/discover/public/application/angular/doc_table/components/row_headers.test.js new file mode 100644 index 0000000000000..b30b13b1f0b6e --- /dev/null +++ b/src/plugins/discover/public/application/angular/doc_table/components/row_headers.test.js @@ -0,0 +1,485 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import angular from 'angular'; +import 'angular-mocks'; +import 'angular-sanitize'; +import 'angular-route'; +import _ from 'lodash'; +import sinon from 'sinon'; +import { getFakeRow, getFakeRowVals } from 'fixtures/fake_row'; +import $ from 'jquery'; +import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; +import { setScopedHistory, setServices, setDocViewsRegistry } from '../../../../kibana_services'; +import { coreMock } from '../../../../../../../core/public/mocks'; +import { dataPluginMock } from '../../../../../../data/public/mocks'; +import { navigationPluginMock } from '../../../../../../navigation/public/mocks'; +import { getInnerAngularModule } from '../../../../get_inner_angular'; +import { createBrowserHistory } from 'history'; + +describe('Doc Table', () => { + const core = coreMock.createStart(); + const dataMock = dataPluginMock.createStartContract(); + let $parentScope; + let $scope; + let $elementScope; + let timeout; + let registry = []; + + // Stub out a minimal mapping of 4 fields + let mapping; + + let fakeRowVals; + let stubFieldFormatConverter; + beforeAll(() => setScopedHistory(createBrowserHistory())); + beforeEach(() => { + angular.element.prototype.slice = jest.fn(function (index) { + return $(this).slice(index); + }); + angular.element.prototype.filter = jest.fn(function (condition) { + return $(this).filter(condition); + }); + angular.element.prototype.toggle = jest.fn(function (name) { + return $(this).toggle(name); + }); + angular.element.prototype.is = jest.fn(function (name) { + return $(this).is(name); + }); + setServices({ + uiSettings: core.uiSettings, + filterManager: dataMock.query.filterManager, + }); + + setDocViewsRegistry({ + addDocView(view) { + registry.push(view); + }, + getDocViewsSorted() { + return registry; + }, + resetRegistry: () => { + registry = []; + }, + }); + + getInnerAngularModule( + 'app/discover', + core, + { + data: dataMock, + navigation: navigationPluginMock.createStartContract(), + }, + coreMock.createPluginInitializerContext() + ); + angular.mock.module('app/discover'); + }); + beforeEach( + angular.mock.inject(function ($rootScope, Private, $timeout) { + $parentScope = $rootScope; + timeout = $timeout; + $parentScope.indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); + mapping = $parentScope.indexPattern.fields; + + // Stub `getConverterFor` for a field in the indexPattern to return mock data. + // Returns `val` if provided, otherwise generates fake data for the field. + fakeRowVals = getFakeRowVals('formatted', 0, mapping); + stubFieldFormatConverter = function ($root, field, val) { + const convertFn = (value, type, options) => { + if (val) { + return val; + } + const fieldName = _.get(options, 'field.name', null); + + return fakeRowVals[fieldName] || ''; + }; + + $root.indexPattern.fields.getByName(field).format.convert = convertFn; + $root.indexPattern.fields.getByName(field).format.getConverterFor = () => convertFn; + }; + }) + ); + + afterEach(() => { + delete angular.element.prototype.slice; + delete angular.element.prototype.filter; + delete angular.element.prototype.toggle; + delete angular.element.prototype.is; + }); + + // Sets up the directive, take an element, and a list of properties to attach to the parent scope. 
+ const init = function ($elem, props) { + angular.mock.inject(function ($compile) { + _.assign($parentScope, props); + const el = $compile($elem)($parentScope); + $elementScope = el.scope(); + el.scope().$digest(); + $scope = el.isolateScope(); + }); + }; + + const destroy = () => { + $scope.$destroy(); + $parentScope.$destroy(); + }; + + // For testing column removing/adding for the header and the rows + const columnTests = function (elemType, parentElem) { + test('should create a time column if the timefield is defined', () => { + const childElems = parentElem.find(elemType); + expect(childElems.length).toBe(1); + }); + + test('should be able to add and remove columns', () => { + let childElems; + + stubFieldFormatConverter($parentScope, 'bytes'); + stubFieldFormatConverter($parentScope, 'request_body'); + + // Should include a column for toggling and the time column by default + $parentScope.columns = ['bytes']; + $elementScope.$digest(); + childElems = parentElem.find(elemType); + expect(childElems.length).toBe(2); + expect($(childElems[1]).text()).toContain('bytes'); + + $parentScope.columns = ['bytes', 'request_body']; + $elementScope.$digest(); + childElems = parentElem.find(elemType); + expect(childElems.length).toBe(3); + expect($(childElems[2]).text()).toContain('request_body'); + + $parentScope.columns = ['request_body']; + $elementScope.$digest(); + childElems = parentElem.find(elemType); + expect(childElems.length).toBe(2); + expect($(childElems[1]).text()).toContain('request_body'); + }); + + test('should create only the toggle column if there is no timeField', () => { + delete $scope.indexPattern.timeFieldName; + $scope.$digest(); + timeout.flush(); + + const childElems = parentElem.find(elemType); + expect(childElems.length).toBe(0); + }); + }; + + describe('kbnTableRow', () => { + const $elem = $( + '' + ); + let row; + + beforeEach(() => { + row = getFakeRow(0, mapping); + + init($elem, { + row, + columns: [], + sorting: [], + filter: sinon.spy(), + maxLength: 50, + }); + }); + afterEach(() => { + destroy(); + }); + + describe('adding and removing columns', () => { + columnTests('[data-test-subj~="docTableField"]', $elem); + }); + + describe('details row', () => { + test('should be an empty tr by default', () => { + expect($elem.next().is('tr')).toBe(true); + expect($elem.next().text()).toBe(''); + }); + + test('should expand the detail row when the toggle arrow is clicked', () => { + $elem.children(':first-child').click(); + expect($elem.next().text()).not.toBe(''); + }); + + describe('expanded', () => { + let $details; + beforeEach(() => { + // Open the row + $scope.toggleRow(); + timeout.flush(); + $details = $elem.next(); + }); + afterEach(() => { + // Close the row + $scope.toggleRow(); + }); + + test('should be a tr with something in it', () => { + expect($details.is('tr')).toBe(true); + expect($details.text()).toBeTruthy(); + }); + }); + }); + }); + + describe('kbnTableRow meta', () => { + const $elem = angular.element( + '' + ); + let row; + + beforeEach(() => { + row = getFakeRow(0, mapping); + + init($elem, { + row: row, + columns: [], + sorting: [], + filtering: sinon.spy(), + maxLength: 50, + }); + + // Open the row + $scope.toggleRow(); + $scope.$digest(); + timeout.flush(); + $elem.next(); + }); + + afterEach(() => { + destroy(); + }); + + /** this no longer works with the new plugin approach + test('should render even when the row source contains a field with the same name as a meta field', () => { + setTimeout(() => { + //this should be overridden by 
later changes + }, 100); + expect($details.find('tr').length).toBe(_.keys($parentScope.indexPattern.flattenHit($scope.row)).length); + }); */ + }); + + describe('row diffing', () => { + let $row; + let $scope; + let $root; + let $before; + + beforeEach( + angular.mock.inject(function ($rootScope, $compile, Private) { + $root = $rootScope; + $root.row = getFakeRow(0, mapping); + $root.columns = ['_source']; + $root.sorting = []; + $root.filtering = sinon.spy(); + $root.maxLength = 50; + $root.mapping = mapping; + $root.indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); + + // Stub field format converters for every field in the indexPattern + $root.indexPattern.fields.forEach((f) => stubFieldFormatConverter($root, f.name)); + + $row = $('').attr({ + 'kbn-table-row': 'row', + columns: 'columns', + sorting: 'sorting', + filtering: 'filtering', + 'index-pattern': 'indexPattern', + }); + + $scope = $root.$new(); + $compile($row)($scope); + $root.$apply(); + + $before = $row.find('td'); + expect($before).toHaveLength(3); + expect($before.eq(0).text().trim()).toBe(''); + expect($before.eq(1).text().trim()).toMatch(/^time_formatted/); + }) + ); + + afterEach(() => { + $row.remove(); + }); + + test('handles a new column', () => { + $root.columns.push('bytes'); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(4); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after[2].outerHTML).toBe($before[2].outerHTML); + expect($after.eq(3).text().trim()).toMatch(/^bytes_formatted/); + }); + + test('handles two new columns at once', () => { + $root.columns.push('bytes'); + $root.columns.push('request_body'); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(5); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after[2].outerHTML).toBe($before[2].outerHTML); + expect($after.eq(3).text().trim()).toMatch(/^bytes_formatted/); + expect($after.eq(4).text().trim()).toMatch(/^request_body_formatted/); + }); + + test('handles three new columns in odd places', () => { + $root.columns = ['@timestamp', 'bytes', '_source', 'request_body']; + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(6); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after.eq(2).text().trim()).toMatch(/^@timestamp_formatted/); + expect($after.eq(3).text().trim()).toMatch(/^bytes_formatted/); + expect($after[4].outerHTML).toBe($before[2].outerHTML); + expect($after.eq(5).text().trim()).toMatch(/^request_body_formatted/); + }); + + test('handles a removed column', () => { + _.pull($root.columns, '_source'); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(2); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + }); + + test('handles two removed columns', () => { + // first add a column + $root.columns.push('@timestamp'); + $root.$apply(); + + const $mid = $row.find('td'); + expect($mid).toHaveLength(4); + + $root.columns.pop(); + $root.columns.pop(); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(2); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + }); + + test('handles three removed random columns', () => 
{ + // first add two column + $root.columns.push('@timestamp', 'bytes'); + $root.$apply(); + + const $mid = $row.find('td'); + expect($mid).toHaveLength(5); + + $root.columns[0] = false; // _source + $root.columns[2] = false; // bytes + $root.columns = $root.columns.filter(Boolean); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(3); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after.eq(2).text().trim()).toMatch(/^@timestamp_formatted/); + }); + + test('handles two columns with the same content', () => { + stubFieldFormatConverter($root, 'request_body', fakeRowVals.bytes); + + $root.columns.length = 0; + $root.columns.push('bytes'); + $root.columns.push('request_body'); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(4); + expect($after.eq(2).text().trim()).toMatch(/^bytes_formatted/); + expect($after.eq(3).text().trim()).toMatch(/^bytes_formatted/); + }); + + test('handles two columns swapping position', () => { + $root.columns.push('bytes'); + $root.$apply(); + + const $mid = $row.find('td'); + expect($mid).toHaveLength(4); + + $root.columns.reverse(); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(4); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after[2].outerHTML).toBe($mid[3].outerHTML); + expect($after[3].outerHTML).toBe($mid[2].outerHTML); + }); + + test('handles four columns all reversing position', () => { + $root.columns.push('bytes', 'response', '@timestamp'); + $root.$apply(); + + const $mid = $row.find('td'); + expect($mid).toHaveLength(6); + + $root.columns.reverse(); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(6); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after[2].outerHTML).toBe($mid[5].outerHTML); + expect($after[3].outerHTML).toBe($mid[4].outerHTML); + expect($after[4].outerHTML).toBe($mid[3].outerHTML); + expect($after[5].outerHTML).toBe($mid[2].outerHTML); + }); + + test('handles multiple columns with the same name', () => { + $root.columns.push('bytes', 'bytes', 'bytes'); + $root.$apply(); + + const $after = $row.find('td'); + expect($after).toHaveLength(6); + expect($after[0].outerHTML).toBe($before[0].outerHTML); + expect($after[1].outerHTML).toBe($before[1].outerHTML); + expect($after[2].outerHTML).toBe($before[2].outerHTML); + expect($after.eq(3).text().trim()).toMatch(/^bytes_formatted/); + expect($after.eq(4).text().trim()).toMatch(/^bytes_formatted/); + expect($after.eq(5).text().trim()).toMatch(/^bytes_formatted/); + }); + }); +}); diff --git a/src/legacy/core_plugins/kibana/public/__tests__/discover/doc_table.js b/src/plugins/discover/public/application/angular/doc_table/doc_table.test.js similarity index 52% rename from src/legacy/core_plugins/kibana/public/__tests__/discover/doc_table.js rename to src/plugins/discover/public/application/angular/doc_table/doc_table.test.js index 504b00808718b..9722981df42b1 100644 --- a/src/legacy/core_plugins/kibana/public/__tests__/discover/doc_table.js +++ b/src/plugins/discover/public/application/angular/doc_table/doc_table.test.js @@ -17,15 +17,18 @@ * under the License. 
*/ import angular from 'angular'; -import expect from '@kbn/expect'; import _ from 'lodash'; -import ngMock from 'ng_mock'; -import 'ui/private'; -import { pluginInstance } from './legacy'; +import 'angular-mocks'; +import 'angular-sanitize'; +import 'angular-route'; +import { createBrowserHistory } from 'history'; import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; import hits from 'fixtures/real_hits'; -import { setScopedHistory } from '../../../../../../plugins/discover/public/kibana_services'; -import { createBrowserHistory } from 'history'; +import { coreMock } from '../../../../../../core/public/mocks'; +import { dataPluginMock } from '../../../../../data/public/mocks'; +import { navigationPluginMock } from '../../../../../navigation/public/mocks'; +import { setScopedHistory, setServices } from '../../../kibana_services'; +import { getInnerAngularModule } from '../../../get_inner_angular'; let $parentScope; @@ -36,7 +39,7 @@ let $timeout; let indexPattern; const init = function ($elem, props) { - ngMock.inject(function ($rootScope, $compile, _$timeout_) { + angular.mock.inject(function ($rootScope, $compile, _$timeout_) { $timeout = _$timeout_; $parentScope = $rootScope; _.assign($parentScope, props); @@ -44,7 +47,7 @@ const init = function ($elem, props) { $compile($elem)($parentScope); // I think the prereq requires this? - $timeout(function () { + $timeout(() => { $elem.scope().$digest(); }, 0); @@ -52,19 +55,40 @@ const init = function ($elem, props) { }); }; -const destroy = function () { +const destroy = () => { $scope.$destroy(); $parentScope.$destroy(); }; -describe('docTable', function () { +describe('docTable', () => { + const core = coreMock.createStart(); let $elem; - before(() => setScopedHistory(createBrowserHistory())); - beforeEach(() => pluginInstance.initializeInnerAngular()); - beforeEach(() => pluginInstance.initializeServices()); - beforeEach(ngMock.module('app/discover')); - beforeEach(function () { + beforeAll(() => setScopedHistory(createBrowserHistory())); + beforeEach(() => { + angular.element.prototype.slice = jest.fn(() => { + return null; + }); + angular.element.prototype.filter = jest.fn(() => { + return { + remove: jest.fn(), + }; + }); + setServices({ + uiSettings: core.uiSettings, + }); + getInnerAngularModule( + 'app/discover', + core, + { + data: dataPluginMock.createStartContract(), + navigation: navigationPluginMock.createStartContract(), + }, + coreMock.createPluginInitializerContext() + ); + angular.mock.module('app/discover'); + }); + beforeEach(() => { $elem = angular.element(` `); - ngMock.inject(function (Private) { + angular.mock.inject(function (Private) { indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); }); init($elem, { @@ -87,34 +111,36 @@ describe('docTable', function () { $scope.$digest(); }); - afterEach(function () { + afterEach(() => { + delete angular.element.prototype.slice; + delete angular.element.prototype.filter; destroy(); }); - it('should compile', function () { - expect($elem.text()).to.not.be.empty(); + test('should compile', () => { + expect($elem.text()).toBeTruthy(); }); - it('should have an addRows function that increases the row count', function () { - expect($scope.addRows).to.be.a(Function); + test('should have an addRows function that increases the row count', () => { + expect($scope.addRows).toBeInstanceOf(Function); $scope.$digest(); - expect($scope.limit).to.be(50); + expect($scope.limit).toBe(50); $scope.addRows(); - 
expect($scope.limit).to.be(100); + expect($scope.limit).toBe(100); }); - it('should reset the row limit when results are received', function () { + test('should reset the row limit when results are received', () => { $scope.limit = 100; - expect($scope.limit).to.be(100); + expect($scope.limit).toBe(100); $scope.hits = [...hits]; $scope.$digest(); - expect($scope.limit).to.be(50); + expect($scope.limit).toBe(50); }); - it('should have a header and a table element', function () { + test('should have a header and a table element', () => { $scope.$digest(); - expect($elem.find('thead').length).to.be(1); - expect($elem.find('table').length).to.be(1); + expect($elem.find('thead').length).toBe(1); + expect($elem.find('table').length).toBe(1); }); }); diff --git a/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx new file mode 100644 index 0000000000000..aa575cd64944c --- /dev/null +++ b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/global_flyout.tsx @@ -0,0 +1,166 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { + createContext, + useContext, + useState, + useCallback, + useMemo, + useEffect, + useRef, +} from 'react'; +import { EuiFlyout } from '@elastic/eui'; + +interface Context { + addContent:

<P extends object = { [key: string]: any }>(content: Content<P>) => void; + removeContent: (contentId: string) => void; + closeFlyout: () => void; +} + +interface Content<P extends object = { [key: string]: any }> { + id: string; + Component: React.FunctionComponent<P>
; + props?: P; + flyoutProps?: { [key: string]: any }; + cleanUpFunc?: () => void; +} + +const FlyoutMultiContentContext = createContext(undefined); + +const DEFAULT_FLYOUT_PROPS = { + 'data-test-subj': 'flyout', + size: 'm' as 'm', + maxWidth: 500, +}; + +export const GlobalFlyoutProvider: React.FC = ({ children }) => { + const [showFlyout, setShowFlyout] = useState(false); + const [activeContent, setActiveContent] = useState | undefined>(undefined); + + const { id, Component, props, flyoutProps } = activeContent ?? {}; + + const addContent: Context['addContent'] = useCallback((content) => { + setActiveContent((prev) => { + if (prev !== undefined) { + if (prev.id !== content.id && prev.cleanUpFunc) { + // Clean up anything from the content about to be removed + prev.cleanUpFunc(); + } + } + return content; + }); + + setShowFlyout(true); + }, []); + + const closeFlyout: Context['closeFlyout'] = useCallback(() => { + setActiveContent(undefined); + setShowFlyout(false); + }, []); + + const removeContent: Context['removeContent'] = useCallback( + (contentId: string) => { + if (contentId === id) { + closeFlyout(); + } + }, + [id, closeFlyout] + ); + + const mergedFlyoutProps = useMemo(() => { + return { + ...DEFAULT_FLYOUT_PROPS, + onClose: closeFlyout, + ...flyoutProps, + }; + }, [flyoutProps, closeFlyout]); + + const context: Context = { + addContent, + removeContent, + closeFlyout, + }; + + const ContentFlyout = showFlyout && Component !== undefined ? Component : null; + + return ( + + <> + {children} + {ContentFlyout && ( + + + + )} + + + ); +}; + +export const useGlobalFlyout = () => { + const ctx = useContext(FlyoutMultiContentContext); + + if (ctx === undefined) { + throw new Error('useGlobalFlyout must be used within a '); + } + + const isMounted = useRef(false); + /** + * A component can add one or multiple content to the flyout + * during its lifecycle. When it unmounts, we will remove + * all those content added to the flyout. + */ + const contents = useRef | undefined>(undefined); + const { removeContent, addContent: addContentToContext } = ctx; + + useEffect(() => { + isMounted.current = true; + + return () => { + isMounted.current = false; + }; + }, []); + + const getContents = useCallback(() => { + if (contents.current === undefined) { + contents.current = new Set(); + } + return contents.current; + }, []); + + const addContent: Context['addContent'] = useCallback( + (content) => { + getContents().add(content.id); + return addContentToContext(content); + }, + [getContents, addContentToContext] + ); + + useEffect(() => { + return () => { + if (!isMounted.current) { + // When the component unmounts, remove all the content it has added to the flyout + Array.from(getContents()).forEach(removeContent); + } + }; + }, [removeContent]); + + return { ...ctx, addContent }; +}; diff --git a/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/index.ts b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/index.ts new file mode 100644 index 0000000000000..c49692547fb25 --- /dev/null +++ b/src/plugins/es_ui_shared/__packages_do_not_import__/global_flyout/index.ts @@ -0,0 +1,20 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { GlobalFlyoutProvider, useGlobalFlyout } from './global_flyout'; diff --git a/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard.tsx b/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard.tsx index cdb332e9e9130..642a21eae50e9 100644 --- a/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard.tsx +++ b/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard.tsx @@ -27,13 +27,14 @@ import { } from './form_wizard_context'; import { FormWizardNav, NavTexts } from './form_wizard_nav'; -interface Props extends ProviderProps { +interface Props extends ProviderProps { isSaving?: boolean; apiError: JSX.Element | null; texts?: Partial; + rightContentNav?: JSX.Element | null | ((stepId: S) => JSX.Element | null); } -export function FormWizard({ +export function FormWizard({ texts, defaultActiveStep, defaultValue, @@ -43,7 +44,8 @@ export function FormWizard({ onSave, onChange, children, -}: Props) { + rightContentNav, +}: Props) { return ( defaultValue={defaultValue} @@ -53,7 +55,14 @@ export function FormWizard({ defaultActiveStep={defaultActiveStep} > - {({ activeStepIndex, lastStep, steps, isCurrentStepValid, navigateToStep }) => { + {({ + activeStepIndex, + lastStep, + steps, + isCurrentStepValid, + navigateToStep, + activeStepId, + }) => { const stepsRequiredArray = Object.values(steps).map( (step) => Boolean(step.isRequired) && step.isComplete === false ); @@ -95,6 +104,13 @@ export function FormWizard({ }; }); + const getRightContentNav = () => { + if (typeof rightContentNav === 'function') { + return rightContentNav(activeStepId); + } + return rightContentNav; + }; + const onBack = () => { const prevStep = activeStepIndex - 1; navigateToStep(prevStep); @@ -129,6 +145,7 @@ export function FormWizard({ onBack={onBack} onNext={onNext} texts={texts} + getRightContent={getRightContentNav} /> ); diff --git a/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard_nav.tsx b/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard_nav.tsx index 3e0e9cf897b5d..0af99e8bce35a 100644 --- a/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard_nav.tsx +++ b/src/plugins/es_ui_shared/public/forms/form_wizard/form_wizard_nav.tsx @@ -29,6 +29,7 @@ interface Props { isSaving?: boolean; isStepValid?: boolean; texts?: Partial; + getRightContent?: () => JSX.Element | null | undefined; } export interface NavTexts { @@ -53,6 +54,7 @@ export const FormWizardNav = ({ onBack, onNext, texts, + getRightContent, }: Props) => { const isLastStep = activeStepIndex === lastStep; const labels = { @@ -66,6 +68,8 @@ export const FormWizardNav = ({ : labels.save : labels.next; + const rightContent = getRightContent !== undefined ? 
getRightContent() : undefined; + return ( @@ -100,6 +104,8 @@ export const FormWizardNav = ({ + + {rightContent && {rightContent}} ); }; diff --git a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts index 8d470f6454b0e..2e7c91a26e1fc 100644 --- a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts +++ b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts @@ -94,7 +94,7 @@ export function useMultiContent({ const activeContentData: Partial = {}; for (const [id, _content] of Object.entries(contents.current)) { - if (validation.contents[id as keyof T]) { + if (validation.contents[id as keyof T] !== false) { const contentData = (_content as Content).getData(); // Replace the getData() handler with the cached value @@ -161,7 +161,7 @@ export function useMultiContent({ ); /** - * Validate the multi-content active content(s) in the DOM + * Validate the content(s) currently in the DOM */ const validate = useCallback(async () => { if (Object.keys(contents.current).length === 0) { diff --git a/src/dev/build/tasks/os_packages/index.js b/src/plugins/es_ui_shared/public/global_flyout/index.ts similarity index 85% rename from src/dev/build/tasks/os_packages/index.js rename to src/plugins/es_ui_shared/public/global_flyout/index.ts index 82626c47b6087..e876594337c1e 100644 --- a/src/dev/build/tasks/os_packages/index.js +++ b/src/plugins/es_ui_shared/public/global_flyout/index.ts @@ -18,8 +18,6 @@ */ export { - CreateRpmPackageTask, - CreateDebPackageTask, - CreateDockerPackageTask, - CreateDockerUbiPackageTask, -} from './create_os_package_tasks'; + GlobalFlyoutProvider, + useGlobalFlyout, +} from '../../__packages_do_not_import__/global_flyout'; diff --git a/src/plugins/es_ui_shared/public/index.ts b/src/plugins/es_ui_shared/public/index.ts index 98a305fe68f08..bdea5ccf5fe26 100644 --- a/src/plugins/es_ui_shared/public/index.ts +++ b/src/plugins/es_ui_shared/public/index.ts @@ -24,6 +24,7 @@ import * as Forms from './forms'; import * as Monaco from './monaco'; import * as ace from './ace'; +import * as GlobalFlyout from './global_flyout'; export { JsonEditor, OnJsonEditorUpdateHandler } from './components/json_editor'; @@ -65,7 +66,7 @@ export { useAuthorizationContext, } from './authorization'; -export { Monaco, Forms, ace }; +export { Monaco, Forms, ace, GlobalFlyout }; export { extractQueryParams } from './url'; diff --git a/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts b/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts index 98287f6bac35d..733a60f1f86ff 100644 --- a/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts +++ b/src/plugins/es_ui_shared/static/forms/helpers/serializers.ts @@ -64,9 +64,13 @@ interface StripEmptyFieldsOptions { * @param options An optional configuration object. By default recursive it turned on. 
*/ export const stripEmptyFields = ( - object: { [key: string]: any }, + object?: { [key: string]: any }, options?: StripEmptyFieldsOptions ): { [key: string]: any } => { + if (object === undefined) { + return {}; + } + const { types = ['string', 'object'], recursive = false } = options || {}; return Object.entries(object).reduce((acc, [key, value]) => { diff --git a/src/plugins/home/server/services/sample_data/routes/list.ts b/src/plugins/home/server/services/sample_data/routes/list.ts index 770b3116b74f1..7cce0fa5cccb3 100644 --- a/src/plugins/home/server/services/sample_data/routes/list.ts +++ b/src/plugins/home/server/services/sample_data/routes/list.ts @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import { isBoom } from 'boom'; import { IRouter } from 'src/core/server'; import { SampleDatasetSchema } from '../lib/sample_dataset_registry_types'; import { createIndexName } from '../lib/create_index_name'; @@ -75,8 +74,7 @@ export const createListRoute = (router: IRouter, sampleDatasets: SampleDatasetSc try { await context.core.savedObjects.client.get('dashboard', sampleDataset.overviewDashboard); } catch (err) { - // savedObjectClient.get() throws an boom error when object is not found. - if (isBoom(err) && err.output.statusCode === 404) { + if (context.core.savedObjects.client.errors.isNotFoundError(err)) { sampleDataset.status = NOT_INSTALLED; return; } diff --git a/src/plugins/timelion/public/directives/timelion_expression_suggestions/__tests__/timelion_expression_suggestions.js b/src/plugins/timelion/public/directives/timelion_expression_suggestions/__tests__/timelion_expression_suggestions.js deleted file mode 100644 index 8a35a72ed19e6..0000000000000 --- a/src/plugins/timelion/public/directives/timelion_expression_suggestions/__tests__/timelion_expression_suggestions.js +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../timelion_expression_suggestions'; - -describe('Timelion expression suggestions directive', function () { - let scope; - let $compile; - - beforeEach(ngMock.module('kibana')); - - beforeEach( - ngMock.inject(function ($injector) { - $compile = $injector.get('$compile'); - scope = $injector.get('$rootScope').$new(); - }) - ); - - describe('attributes', function () { - describe('suggestions', function () { - let element = null; - const template = ``; - - beforeEach(function () { - element = $compile(template)(scope); - scope.$apply(() => { - scope.list = [{ name: 'suggestion1' }, { name: 'suggestion2' }, { name: 'suggestion3' }]; - }); - }); - - it('are rendered', function () { - expect(element.find('[data-test-subj="timelionSuggestionListItem"]').length).to.be( - scope.list.length - ); - }); - }); - }); -}); diff --git a/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx b/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx index 6713c2ce2391b..11ceb5885dd31 100644 --- a/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx +++ b/src/plugins/vis_default_editor/public/components/sidebar/sidebar_title.tsx @@ -65,7 +65,7 @@ export function LinkedSearch({ savedSearch, eventEmitter }: LinkedSearchProps) { }, [eventEmitter]); const onClickViewInDiscover = useCallback(() => { application.navigateToApp('discover', { - path: `#/${savedSearch.id}`, + path: `#/view/${savedSearch.id}`, }); }, [application, savedSearch.id]); @@ -128,7 +128,12 @@ export function LinkedSearch({ savedSearch, eventEmitter }: LinkedSearchProps) {

- + { isEmbeddableRendered={isEmbeddableRendered} hasUnappliedChanges={hasUnappliedChanges} originatingApp={originatingApp} + setOriginatingApp={setOriginatingApp} savedVisInstance={savedVisInstance} stateContainer={appState} visualizationIdFromUrl={visualizationIdFromUrl} diff --git a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx index 2e7dba46487ad..f00c26f83e1e5 100644 --- a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx +++ b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx @@ -40,6 +40,7 @@ interface VisualizeTopNavProps { setHasUnsavedChanges: (value: boolean) => void; hasUnappliedChanges: boolean; originatingApp?: string; + setOriginatingApp?: (originatingApp: string | undefined) => void; savedVisInstance: SavedVisInstance; stateContainer: VisualizeAppStateContainer; visualizationIdFromUrl?: string; @@ -53,6 +54,7 @@ const TopNav = ({ setHasUnsavedChanges, hasUnappliedChanges, originatingApp, + setOriginatingApp, savedVisInstance, stateContainer, visualizationIdFromUrl, @@ -86,6 +88,7 @@ const TopNav = ({ hasUnappliedChanges, openInspector, originatingApp, + setOriginatingApp, savedVisInstance, stateContainer, visualizationIdFromUrl, @@ -100,6 +103,7 @@ const TopNav = ({ hasUnappliedChanges, openInspector, originatingApp, + setOriginatingApp, savedVisInstance, stateContainer, visualizationIdFromUrl, diff --git a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx index 96f64c6478fa9..392168a530087 100644 --- a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx +++ b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx @@ -39,6 +39,7 @@ interface TopNavConfigParams { setHasUnsavedChanges: (value: boolean) => void; openInspector: () => void; originatingApp?: string; + setOriginatingApp?: (originatingApp: string | undefined) => void; hasUnappliedChanges: boolean; savedVisInstance: SavedVisInstance; stateContainer: VisualizeAppStateContainer; @@ -51,6 +52,7 @@ export const getTopNavConfig = ( setHasUnsavedChanges, openInspector, originatingApp, + setOriginatingApp, hasUnappliedChanges, savedVisInstance: { embeddableHandler, savedVis, vis }, stateContainer, @@ -112,6 +114,9 @@ export const getTopNavConfig = ( application.navigateToApp(originatingApp); } } else { + if (setOriginatingApp && originatingApp && savedVis.copyOnSave) { + setOriginatingApp(undefined); + } chrome.docTitle.change(savedVis.lastSavedTitle); chrome.setBreadcrumbs(getEditBreadcrumbs(savedVis.lastSavedTitle)); diff --git a/test/api_integration/apis/saved_objects/migrations.js b/test/api_integration/apis/saved_objects/migrations.ts similarity index 68% rename from test/api_integration/apis/saved_objects/migrations.js rename to test/api_integration/apis/saved_objects/migrations.ts index ed259ccec0114..9997d9710e212 100644 --- a/test/api_integration/apis/saved_objects/migrations.js +++ b/test/api_integration/apis/saved_objects/migrations.ts @@ -23,22 +23,39 @@ import { set } from '@elastic/safer-lodash-set'; import _ from 'lodash'; -import { assert } from 'chai'; +import expect from '@kbn/expect'; +import { ElasticsearchClient, SavedObjectMigrationMap, SavedObjectsType } from 'src/core/server'; +import { SearchResponse } from '../../../../src/core/server/elasticsearch/client'; import { DocumentMigrator, IndexMigrator, + createMigrationEsClient, 
} from '../../../../src/core/server/saved_objects/migrations/core'; +import { SavedObjectsTypeMappingDefinitions } from '../../../../src/core/server/saved_objects/mappings'; + import { SavedObjectsSerializer, SavedObjectTypeRegistry, } from '../../../../src/core/server/saved_objects'; - -export default ({ getService }) => { - const es = getService('legacyEs'); - const callCluster = (path, ...args) => _.get(es, path).call(es, ...args); +import { FtrProviderContext } from '../../ftr_provider_context'; + +function getLogMock() { + return { + debug() {}, + error() {}, + fatal() {}, + info() {}, + log() {}, + trace() {}, + warn() {}, + get: getLogMock, + }; +} +export default ({ getService }: FtrProviderContext) => { + const esClient = getService('es'); describe('Kibana index migration', () => { - before(() => callCluster('indices.delete', { index: '.migrate-*' })); + before(() => esClient.indices.delete({ index: '.migrate-*' })); it('Migrates an existing index that has never been migrated before', async () => { const index = '.migration-a'; @@ -55,7 +72,7 @@ export default ({ getService }) => { bar: { properties: { mynum: { type: 'integer' } } }, }; - const migrations = { + const migrations: Record = { foo: { '1.0.0': (doc) => set(doc, 'attributes.name', doc.attributes.name.toUpperCase()), }, @@ -66,11 +83,11 @@ export default ({ getService }) => { }, }; - await createIndex({ callCluster, index }); - await createDocs({ callCluster, index, docs: originalDocs }); + await createIndex({ esClient, index }); + await createDocs({ esClient, index, docs: originalDocs }); // Test that unrelated index templates are unaffected - await callCluster('indices.putTemplate', { + await esClient.indices.putTemplate({ name: 'migration_test_a_template', body: { index_patterns: 'migration_test_a', @@ -82,7 +99,7 @@ export default ({ getService }) => { }); // Test that obsolete index templates get removed - await callCluster('indices.putTemplate', { + await esClient.indices.putTemplate({ name: 'migration_a_template', body: { index_patterns: index, @@ -93,29 +110,37 @@ export default ({ getService }) => { }, }); - assert.isTrue(await callCluster('indices.existsTemplate', { name: 'migration_a_template' })); + const migrationATemplate = await esClient.indices.existsTemplate({ + name: 'migration_a_template', + }); + expect(migrationATemplate.body).to.be.ok(); const result = await migrateIndex({ - callCluster, + esClient, index, migrations, mappingProperties, obsoleteIndexTemplatePattern: 'migration_a*', }); - assert.isFalse(await callCluster('indices.existsTemplate', { name: 'migration_a_template' })); - assert.isTrue( - await callCluster('indices.existsTemplate', { name: 'migration_test_a_template' }) - ); + const migrationATemplateAfter = await esClient.indices.existsTemplate({ + name: 'migration_a_template', + }); - assert.deepEqual(_.omit(result, 'elapsedMs'), { + expect(migrationATemplateAfter.body).not.to.be.ok(); + const migrationTestATemplateAfter = await esClient.indices.existsTemplate({ + name: 'migration_test_a_template', + }); + + expect(migrationTestATemplateAfter.body).to.be.ok(); + expect(_.omit(result, 'elapsedMs')).to.eql({ destIndex: '.migration-a_2', sourceIndex: '.migration-a_1', status: 'migrated', }); // The docs in the original index are unchanged - assert.deepEqual(await fetchDocs({ callCluster, index: `${index}_1` }), [ + expect(await fetchDocs(esClient, `${index}_1`)).to.eql([ { id: 'bar:i', type: 'bar', bar: { nomnom: 33 } }, { id: 'bar:o', type: 'bar', bar: { nomnom: 2 } }, { id: 'baz:u', 
type: 'baz', baz: { title: 'Terrific!' } }, @@ -124,7 +149,7 @@ export default ({ getService }) => { ]); // The docs in the alias have been migrated - assert.deepEqual(await fetchDocs({ callCluster, index }), [ + expect(await fetchDocs(esClient, index)).to.eql([ { id: 'bar:i', type: 'bar', @@ -171,7 +196,7 @@ export default ({ getService }) => { bar: { properties: { mynum: { type: 'integer' } } }, }; - const migrations = { + const migrations: Record = { foo: { '1.0.0': (doc) => set(doc, 'attributes.name', doc.attributes.name.toUpperCase()), }, @@ -182,19 +207,20 @@ export default ({ getService }) => { }, }; - await createIndex({ callCluster, index }); - await createDocs({ callCluster, index, docs: originalDocs }); + await createIndex({ esClient, index }); + await createDocs({ esClient, index, docs: originalDocs }); - await migrateIndex({ callCluster, index, migrations, mappingProperties }); + await migrateIndex({ esClient, index, migrations, mappingProperties }); + // @ts-expect-error name doesn't exist on mynum type mappingProperties.bar.properties.name = { type: 'keyword' }; migrations.foo['2.0.1'] = (doc) => set(doc, 'attributes.name', `${doc.attributes.name}v2`); migrations.bar['2.3.4'] = (doc) => set(doc, 'attributes.name', `NAME ${doc.id}`); - await migrateIndex({ callCluster, index, migrations, mappingProperties }); + await migrateIndex({ esClient, index, migrations, mappingProperties }); // The index for the initial migration has not been destroyed... - assert.deepEqual(await fetchDocs({ callCluster, index: `${index}_2` }), [ + expect(await fetchDocs(esClient, `${index}_2`)).to.eql([ { id: 'bar:i', type: 'bar', @@ -226,7 +252,7 @@ export default ({ getService }) => { ]); // The docs were migrated again... - assert.deepEqual(await fetchDocs({ callCluster, index }), [ + expect(await fetchDocs(esClient, index)).to.eql([ { id: 'bar:i', type: 'bar', @@ -266,48 +292,43 @@ export default ({ getService }) => { foo: { properties: { name: { type: 'text' } } }, }; - const migrations = { + const migrations: Record = { foo: { '1.0.0': (doc) => set(doc, 'attributes.name', 'LOTR'), }, }; - await createIndex({ callCluster, index }); - await createDocs({ callCluster, index, docs: originalDocs }); + await createIndex({ esClient, index }); + await createDocs({ esClient, index, docs: originalDocs }); const result = await Promise.all([ - migrateIndex({ callCluster, index, migrations, mappingProperties }), - migrateIndex({ callCluster, index, migrations, mappingProperties }), + migrateIndex({ esClient, index, migrations, mappingProperties }), + migrateIndex({ esClient, index, migrations, mappingProperties }), ]); // The polling instance and the migrating instance should both - // return a similar migraiton result. - assert.deepEqual( + // return a similar migration result. + expect( result + // @ts-expect-error destIndex exists only on MigrationResult status: 'migrated'; .map(({ status, destIndex }) => ({ status, destIndex })) - .sort((a) => (a.destIndex ? 0 : 1)), - [ - { status: 'migrated', destIndex: '.migration-c_2' }, - { status: 'skipped', destIndex: undefined }, - ] - ); + .sort((a) => (a.destIndex ? 
0 : 1)) + ).to.eql([ + { status: 'migrated', destIndex: '.migration-c_2' }, + { status: 'skipped', destIndex: undefined }, + ]); + const { body } = await esClient.cat.indices({ index: '.migration-c*', format: 'json' }); // It only created the original and the dest - assert.deepEqual( - _.map( - await callCluster('cat.indices', { index: '.migration-c*', format: 'json' }), - 'index' - ).sort(), - ['.migration-c_1', '.migration-c_2'] - ); + expect(_.map(body, 'index').sort()).to.eql(['.migration-c_1', '.migration-c_2']); // The docs in the original index are unchanged - assert.deepEqual(await fetchDocs({ callCluster, index: `${index}_1` }), [ + expect(await fetchDocs(esClient, `${index}_1`)).to.eql([ { id: 'foo:lotr', type: 'foo', foo: { name: 'Lord of the Rings' } }, ]); // The docs in the alias have been migrated - assert.deepEqual(await fetchDocs({ callCluster, index }), [ + expect(await fetchDocs(esClient, index)).to.eql([ { id: 'foo:lotr', type: 'foo', @@ -320,38 +341,53 @@ export default ({ getService }) => { }); }; -async function createIndex({ callCluster, index }) { - await callCluster('indices.delete', { index: `${index}*`, ignore: [404] }); +async function createIndex({ esClient, index }: { esClient: ElasticsearchClient; index: string }) { + await esClient.indices.delete({ index: `${index}*` }, { ignore: [404] }); const properties = { type: { type: 'keyword' }, foo: { properties: { name: { type: 'keyword' } } }, bar: { properties: { nomnom: { type: 'integer' } } }, baz: { properties: { title: { type: 'keyword' } } }, }; - await callCluster('indices.create', { + await esClient.indices.create({ index, body: { mappings: { dynamic: 'strict', properties } }, }); } -async function createDocs({ callCluster, index, docs }) { - await callCluster('bulk', { +async function createDocs({ + esClient, + index, + docs, +}: { + esClient: ElasticsearchClient; + index: string; + docs: any[]; +}) { + await esClient.bulk({ body: docs.reduce((acc, doc) => { acc.push({ index: { _id: doc.id, _index: index } }); acc.push(_.omit(doc, 'id')); return acc; }, []), }); - await callCluster('indices.refresh', { index }); + await esClient.indices.refresh({ index }); } async function migrateIndex({ - callCluster, + esClient, index, migrations, mappingProperties, validateDoc, obsoleteIndexTemplatePattern, +}: { + esClient: ElasticsearchClient; + index: string; + migrations: Record; + mappingProperties: SavedObjectsTypeMappingDefinitions; + validateDoc?: (doc: any) => void; + obsoleteIndexTemplatePattern?: string; }) { const typeRegistry = new SavedObjectTypeRegistry(); const types = migrationsToTypes(migrations); @@ -361,17 +397,17 @@ async function migrateIndex({ kibanaVersion: '99.9.9', typeRegistry, validateDoc: validateDoc || _.noop, - log: { info: _.noop, debug: _.noop, warn: _.noop }, + log: getLogMock(), }); const migrator = new IndexMigrator({ - callCluster, + client: createMigrationEsClient(esClient, getLogMock()), documentMigrator, index, obsoleteIndexTemplatePattern, mappingProperties, batchSize: 10, - log: { info: _.noop, debug: _.noop, warn: _.noop }, + log: getLogMock(), pollInterval: 50, scrollDuration: '5m', serializer: new SavedObjectsSerializer(typeRegistry), @@ -380,21 +416,22 @@ async function migrateIndex({ return await migrator.migrate(); } -function migrationsToTypes(migrations) { - return Object.entries(migrations).map(([type, migrations]) => ({ +function migrationsToTypes( + migrations: Record +): SavedObjectsType[] { + return Object.entries(migrations).map(([type, migrationsMap]) => ({ 
name: type, hidden: false, namespaceType: 'single', mappings: { properties: {} }, - migrations: { ...migrations }, + migrations: { ...migrationsMap }, })); } -async function fetchDocs({ callCluster, index }) { - const { - hits: { hits }, - } = await callCluster('search', { index }); - return hits +async function fetchDocs(esClient: ElasticsearchClient, index: string) { + const { body } = await esClient.search>({ index }); + + return body.hits.hits .map((h) => ({ ...h._source, id: h._id, diff --git a/test/functional/apps/dashboard/dashboard_filtering.js b/test/functional/apps/dashboard/dashboard_filtering.js index cd80f915775c9..0be4fbbebe7c5 100644 --- a/test/functional/apps/dashboard/dashboard_filtering.js +++ b/test/functional/apps/dashboard/dashboard_filtering.js @@ -183,9 +183,6 @@ export default function ({ getService, getPageObjects }) { }); describe('disabling a filter unfilters the data on', function () { - // Flaky test - // https://github.com/elastic/kibana/issues/41087 - this.tags('skipFirefox'); before(async () => { await filterBar.toggleFilterEnabled('bytes'); await PageObjects.header.waitUntilLoadingHasFinished(); diff --git a/test/functional/apps/dashboard/edit_embeddable_redirects.js b/test/functional/apps/dashboard/edit_embeddable_redirects.js index a366e34b121d9..6d3d43890a962 100644 --- a/test/functional/apps/dashboard/edit_embeddable_redirects.js +++ b/test/functional/apps/dashboard/edit_embeddable_redirects.js @@ -75,5 +75,17 @@ export default function ({ getService, getPageObjects }) { const titles = await PageObjects.dashboard.getPanelTitles(); expect(titles.indexOf(newTitle)).to.not.be(-1); }); + + it('loses originatingApp connection after save as when redirectToOrigin is false', async () => { + const newTitle = 'wowee, my title just got cooler again'; + await PageObjects.header.waitUntilLoadingHasFinished(); + await dashboardPanelActions.openContextMenu(); + await dashboardPanelActions.clickEdit(); + await PageObjects.visualize.saveVisualizationExpectSuccess(newTitle, { + saveAsNew: true, + redirectToOrigin: false, + }); + await PageObjects.visualize.notLinkedToOriginatingApp(); + }); }); } diff --git a/test/functional/apps/management/_create_index_pattern_wizard.js b/test/functional/apps/management/_create_index_pattern_wizard.js index 160b052e70d30..9760527371408 100644 --- a/test/functional/apps/management/_create_index_pattern_wizard.js +++ b/test/functional/apps/management/_create_index_pattern_wizard.js @@ -25,8 +25,7 @@ export default function ({ getService, getPageObjects }) { const es = getService('legacyEs'); const PageObjects = getPageObjects(['settings', 'common']); - // Flaky: https://github.com/elastic/kibana/issues/71501 - describe.skip('"Create Index Pattern" wizard', function () { + describe('"Create Index Pattern" wizard', function () { before(async function () { // delete .kibana index and then wait for Kibana to re-create it await kibanaServer.uiSettings.replace({}); diff --git a/test/functional/apps/management/_handle_alias.js b/test/functional/apps/management/_handle_alias.js index 902b49eacdc00..67a4445d17aa0 100644 --- a/test/functional/apps/management/_handle_alias.js +++ b/test/functional/apps/management/_handle_alias.js @@ -26,8 +26,7 @@ export default function ({ getService, getPageObjects }) { const security = getService('security'); const PageObjects = getPageObjects(['common', 'home', 'settings', 'discover', 'timePicker']); - // FLAKY: https://github.com/elastic/kibana/issues/59717 - describe.skip('Index patterns on aliases', function 
() { + describe('Index patterns on aliases', function () { before(async function () { await security.testUser.setRoles(['kibana_admin', 'test_alias_reader']); await esArchiver.loadIfNeeded('alias'); @@ -50,9 +49,8 @@ export default function ({ getService, getPageObjects }) { }); it('should be able to create index pattern without time field', async function () { - await PageObjects.settings.createIndexPattern('alias1', null); - const patternName = await PageObjects.settings.getIndexPageHeading(); - expect(patternName).to.be('alias1*'); + await PageObjects.settings.navigateTo(); + await PageObjects.settings.createIndexPattern('alias1*', null); }); it('should be able to discover and verify no of hits for alias1', async function () { @@ -64,9 +62,8 @@ export default function ({ getService, getPageObjects }) { }); it('should be able to create index pattern with timefield', async function () { - await PageObjects.settings.createIndexPattern('alias2', 'date'); - const patternName = await PageObjects.settings.getIndexPageHeading(); - expect(patternName).to.be('alias2*'); + await PageObjects.settings.navigateTo(); + await PageObjects.settings.createIndexPattern('alias2*', 'date'); }); it('should be able to discover and verify no of hits for alias2', async function () { diff --git a/test/functional/apps/visualize/_gauge_chart.js b/test/functional/apps/visualize/_gauge_chart.js index aa94e596319c2..0f870b1fb545f 100644 --- a/test/functional/apps/visualize/_gauge_chart.js +++ b/test/functional/apps/visualize/_gauge_chart.js @@ -26,7 +26,6 @@ export default function ({ getService, getPageObjects }) { const testSubjects = getService('testSubjects'); const PageObjects = getPageObjects(['visualize', 'visEditor', 'visChart', 'timePicker']); - // FLAKY: https://github.com/elastic/kibana/issues/45089 describe('gauge chart', function indexPatternCreation() { async function initGaugeVis() { log.debug('navigateToApp visualize'); diff --git a/test/functional/apps/visualize/_linked_saved_searches.ts b/test/functional/apps/visualize/_linked_saved_searches.ts index b0edcb68f3efc..e7b2909afa5a1 100644 --- a/test/functional/apps/visualize/_linked_saved_searches.ts +++ b/test/functional/apps/visualize/_linked_saved_searches.ts @@ -21,8 +21,10 @@ import expect from '@kbn/expect'; import { FtrProviderContext } from '../../ftr_provider_context'; // eslint-disable-next-line import/no-default-export export default function ({ getService, getPageObjects }: FtrProviderContext) { + const browser = getService('browser'); const filterBar = getService('filterBar'); const retry = getService('retry'); + const testSubjects = getService('testSubjects'); const PageObjects = getPageObjects([ 'common', 'discover', @@ -35,12 +37,14 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { describe('saved search visualizations from visualize app', function describeIndexTests() { describe('linked saved searched', () => { const savedSearchName = 'vis_saved_search'; + let discoverSavedSearchUrlPath: string; before(async () => { await PageObjects.common.navigateToApp('discover'); await filterBar.addFilter('extension.raw', 'is', 'jpg'); await PageObjects.header.waitUntilLoadingHasFinished(); await PageObjects.discover.saveSearch(savedSearchName); + discoverSavedSearchUrlPath = (await browser.getCurrentUrl()).split('?')[0]; }); it('should create a visualization from a saved search', async () => { @@ -54,6 +58,24 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { }); }); + it('should have a 
valid link to the saved search from the visualization', async () => { + await testSubjects.click('showUnlinkSavedSearchPopover'); + await testSubjects.click('viewSavedSearch'); + await PageObjects.header.waitUntilLoadingHasFinished(); + + await retry.waitFor('wait discover load its breadcrumbs', async () => { + const discoverBreadcrumb = await PageObjects.discover.getCurrentQueryName(); + return discoverBreadcrumb === savedSearchName; + }); + + const discoverURLPath = (await browser.getCurrentUrl()).split('?')[0]; + expect(discoverURLPath).to.equal(discoverSavedSearchUrlPath); + + // go back to visualize + await browser.goBack(); + await PageObjects.header.waitUntilLoadingHasFinished(); + }); + it('should respect the time filter when linked to a saved search', async () => { await PageObjects.timePicker.setAbsoluteRange( 'Sep 19, 2015 @ 06:31:44.000', diff --git a/test/functional/apps/visualize/_tsvb_time_series.ts b/test/functional/apps/visualize/_tsvb_time_series.ts index e0d512c1f4861..c048755fc5fbe 100644 --- a/test/functional/apps/visualize/_tsvb_time_series.ts +++ b/test/functional/apps/visualize/_tsvb_time_series.ts @@ -107,7 +107,7 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) { expect(actualCount).to.be(expectedLegendValue); }); - it.skip('should show the correct count in the legend with "Human readable" duration formatter', async () => { + it('should show the correct count in the legend with "Human readable" duration formatter', async () => { await visualBuilder.clickSeriesOption(); await visualBuilder.changeDataFormatter('Duration'); await visualBuilder.setDurationFormatterSettings({ to: 'Human readable' }); diff --git a/test/functional/apps/visualize/_vega_chart.js b/test/functional/apps/visualize/_vega_chart.ts similarity index 59% rename from test/functional/apps/visualize/_vega_chart.js rename to test/functional/apps/visualize/_vega_chart.ts index c530c6f823133..6c0b77411ae99 100644 --- a/test/functional/apps/visualize/_vega_chart.js +++ b/test/functional/apps/visualize/_vega_chart.ts @@ -18,9 +18,17 @@ */ import expect from '@kbn/expect'; +import { FtrProviderContext } from '../../ftr_provider_context'; -export default function ({ getService, getPageObjects }) { - const PageObjects = getPageObjects(['timePicker', 'visualize', 'visChart', 'vegaChart']); +// eslint-disable-next-line import/no-default-export +export default function ({ getPageObjects, getService }: FtrProviderContext) { + const PageObjects = getPageObjects([ + 'timePicker', + 'visualize', + 'visChart', + 'visEditor', + 'vegaChart', + ]); const filterBar = getService('filterBar'); const log = getService('log'); @@ -30,13 +38,15 @@ export default function ({ getService, getPageObjects }) { await PageObjects.visualize.navigateToNewVisualization(); log.debug('clickVega'); await PageObjects.visualize.clickVega(); + await PageObjects.visChart.waitForVisualizationRenderingStabilized(); }); describe('vega chart', () => { describe('initial render', () => { - it.skip('should have some initial vega spec text', async function () { + it('should have some initial vega spec text', async function () { const vegaSpec = await PageObjects.vegaChart.getSpec(); - expect(vegaSpec).to.contain('{').and.to.contain('data'); + expect(vegaSpec).to.contain('{'); + expect(vegaSpec).to.contain('data'); expect(vegaSpec.length).to.be.above(500); }); @@ -44,7 +54,8 @@ export default function ({ getService, getPageObjects }) { const view = await PageObjects.vegaChart.getViewContainer(); expect(view).to.be.ok(); 
const size = await view.getSize(); - expect(size).to.have.property('width').and.to.have.property('height'); + expect(size).to.have.property('width'); + expect(size).to.have.property('height'); expect(size.width).to.be.above(0); expect(size.height).to.be.above(0); @@ -63,10 +74,18 @@ export default function ({ getService, getPageObjects }) { await filterBar.removeAllFilters(); }); - it.skip('should render different data in response to filter change', async function () { - await PageObjects.vegaChart.expectVisToMatchScreenshot('vega_chart'); + it('should render different data in response to filter change', async function () { + await PageObjects.vegaChart.typeInSpec('"config": { "kibana": {"renderer": "svg"} },'); + await PageObjects.visEditor.clickGo(); + await PageObjects.visChart.waitForVisualizationRenderingStabilized(); + const fullDataLabels = await PageObjects.vegaChart.getYAxisLabels(); + expect(fullDataLabels[0]).to.eql('0'); + expect(fullDataLabels[fullDataLabels.length - 1]).to.eql('1,600'); await filterBar.addFilter('@tags.raw', 'is', 'error'); - await PageObjects.vegaChart.expectVisToMatchScreenshot('vega_chart_filtered'); + await PageObjects.visChart.waitForVisualizationRenderingStabilized(); + const filteredDataLabels = await PageObjects.vegaChart.getYAxisLabels(); + expect(filteredDataLabels[0]).to.eql('0'); + expect(filteredDataLabels[filteredDataLabels.length - 1]).to.eql('90'); }); }); }); diff --git a/test/functional/page_objects/vega_chart_page.ts b/test/functional/page_objects/vega_chart_page.ts index 488f4cfd0d0ce..b9906911b00f1 100644 --- a/test/functional/page_objects/vega_chart_page.ts +++ b/test/functional/page_objects/vega_chart_page.ts @@ -17,20 +17,17 @@ * under the License. */ -import expect from '@kbn/expect'; +import { Key } from 'selenium-webdriver'; import { FtrProviderContext } from '../ftr_provider_context'; export function VegaChartPageProvider({ getService, getPageObjects, - updateBaselines, }: FtrProviderContext & { updateBaselines: boolean }) { const find = getService('find'); const testSubjects = getService('testSubjects'); const browser = getService('browser'); - const screenshot = getService('screenshots'); - const log = getService('log'); - const { visEditor, visChart } = getPageObjects(['visEditor', 'visChart']); + const { common } = getPageObjects(['common']); class VegaChartPage { public async getSpec() { @@ -45,6 +42,19 @@ export function VegaChartPageProvider({ return linesText.join('\n'); } + public async typeInSpec(text: string) { + const editor = await testSubjects.find('vega-editor'); + const textarea = await editor.findByClassName('ace_content'); + await textarea.click(); + let repeats = 20; + while (--repeats > 0) { + await browser.pressKeys(Key.ARROW_UP); + await common.sleep(50); + } + await browser.pressKeys(Key.ARROW_RIGHT); + await browser.pressKeys(text); + } + public async getViewContainer() { return await find.byCssSelector('div.vgaVis__view'); } @@ -53,37 +63,16 @@ export function VegaChartPageProvider({ return await find.byCssSelector('div.vgaVis__controls'); } - /** - * Removes chrome and takes a small screenshot of a vis to compare against a baseline. - * @param {string} name The name of the baseline image. - * @param {object} opts Options object. - * @param {number} opts.threshold Threshold for allowed variance when comparing images. 
- */ - public async expectVisToMatchScreenshot(name: string, opts = { threshold: 0.05 }) { - log.debug(`expectVisToMatchScreenshot(${name})`); - - // Collapse sidebar and inject some CSS to hide the nav so we have a focused screenshot - await visEditor.clickEditorSidebarCollapse(); - await visChart.waitForVisualizationRenderingStabilized(); - await browser.execute(` - var el = document.createElement('style'); - el.id = '__data-test-style'; - el.innerHTML = '[data-test-subj="headerGlobalNav"] { display: none; } '; - el.innerHTML += '[data-test-subj="top-nav"] { display: none; } '; - el.innerHTML += '[data-test-subj="experimentalVisInfo"] { display: none; } '; - document.body.appendChild(el); - `); - - const percentDifference = await screenshot.compareAgainstBaseline(name, updateBaselines); - - // Reset the chart to its original state - await browser.execute(` - var el = document.getElementById('__data-test-style'); - document.body.removeChild(el); - `); - await visEditor.clickEditorSidebarCollapse(); - await visChart.waitForVisualizationRenderingStabilized(); - expect(percentDifference).to.be.lessThan(opts.threshold); + public async getYAxisLabels() { + const chart = await testSubjects.find('visualizationLoader'); + const yAxis = await chart.findByCssSelector('[aria-label^="Y-axis"]'); + const tickGroup = await yAxis.findByClassName('role-axis-label'); + const labels = await tickGroup.findAllByCssSelector('text'); + const labelTexts: string[] = []; + for (const label of labels) { + labelTexts.push(await label.getVisibleText()); + } + return labelTexts; } } diff --git a/test/functional/page_objects/visual_builder_page.ts b/test/functional/page_objects/visual_builder_page.ts index 4a4beca959540..0db8cac0f0758 100644 --- a/test/functional/page_objects/visual_builder_page.ts +++ b/test/functional/page_objects/visual_builder_page.ts @@ -279,8 +279,10 @@ export function VisualBuilderPageProvider({ getService, getPageObjects }: FtrPro decimalPlaces?: string; }) { if (from) { - const fromCombobox = await find.byCssSelector('[id$="from-row"] .euiComboBox'); - await comboBox.setElement(fromCombobox, from, { clickWithMouse: true }); + await retry.try(async () => { + const fromCombobox = await find.byCssSelector('[id$="from-row"] .euiComboBox'); + await comboBox.setElement(fromCombobox, from, { clickWithMouse: true }); + }); } if (to) { const toCombobox = await find.byCssSelector('[id$="to-row"] .euiComboBox'); diff --git a/test/functional/page_objects/visualize_page.ts b/test/functional/page_objects/visualize_page.ts index a08598fc42d68..92692767b096d 100644 --- a/test/functional/page_objects/visualize_page.ts +++ b/test/functional/page_objects/visualize_page.ts @@ -352,6 +352,16 @@ export function VisualizePageProvider({ getService, getPageObjects }: FtrProvide await testSubjects.existOrFail('visualizesaveAndReturnButton'); await testSubjects.click('visualizesaveAndReturnButton'); } + + public async linkedToOriginatingApp() { + await header.waitUntilLoadingHasFinished(); + await testSubjects.existOrFail('visualizesaveAndReturnButton'); + } + + public async notLinkedToOriginatingApp() { + await header.waitUntilLoadingHasFinished(); + await testSubjects.missingOrFail('visualizesaveAndReturnButton'); + } } return new VisualizePage(); diff --git a/test/functional/screenshots/baseline/vega_chart.png b/test/functional/screenshots/baseline/vega_chart.png deleted file mode 100644 index 5288bd9c7b924..0000000000000 Binary files a/test/functional/screenshots/baseline/vega_chart.png and /dev/null differ diff 
--git a/test/functional/screenshots/baseline/vega_chart_filtered.png b/test/functional/screenshots/baseline/vega_chart_filtered.png deleted file mode 100644 index 974ede74095d2..0000000000000 Binary files a/test/functional/screenshots/baseline/vega_chart_filtered.png and /dev/null differ diff --git a/x-pack/package.json b/x-pack/package.json index 1de009ae1232f..39bdb76ac7a73 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -49,7 +49,7 @@ "@testing-library/react-hooks": "^3.2.1", "@testing-library/jest-dom": "^5.8.0", "@types/angular": "^1.6.56", - "@types/archiver": "^3.0.0", + "@types/archiver": "^3.1.0", "@types/base64-js": "^1.2.5", "@types/boom": "^7.2.0", "@types/cheerio": "^0.22.10", diff --git a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/action_context.test.ts b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/action_context.test.ts index a72a7343c5904..3f5addb77cb33 100644 --- a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/action_context.test.ts +++ b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/action_context.test.ts @@ -9,11 +9,6 @@ import { ParamsSchema } from './alert_type_params'; describe('ActionContext', () => { it('generates expected properties if aggField is null', async () => { - const base: BaseActionContext = { - date: '2020-01-01T00:00:00.000Z', - group: '[group]', - value: 42, - }; const params = ParamsSchema.validate({ index: '[index]', timeField: '[timeField]', @@ -26,6 +21,11 @@ describe('ActionContext', () => { thresholdComparator: '>', threshold: [4], }); + const base: BaseActionContext = { + date: '2020-01-01T00:00:00.000Z', + group: '[group]', + value: 42, + }; const context = addMessages({ name: '[alert-name]' }, base, params); expect(context.title).toMatchInlineSnapshot( `"alert [alert-name] group [group] exceeded threshold"` @@ -36,11 +36,6 @@ describe('ActionContext', () => { }); it('generates expected properties if aggField is not null', async () => { - const base: BaseActionContext = { - date: '2020-01-01T00:00:00.000Z', - group: '[group]', - value: 42, - }; const params = ParamsSchema.validate({ index: '[index]', timeField: '[timeField]', @@ -54,6 +49,11 @@ describe('ActionContext', () => { thresholdComparator: '>', threshold: [4.2], }); + const base: BaseActionContext = { + date: '2020-01-01T00:00:00.000Z', + group: '[group]', + value: 42, + }; const context = addMessages({ name: '[alert-name]' }, base, params); expect(context.title).toMatchInlineSnapshot( `"alert [alert-name] group [group] exceeded threshold"` @@ -64,11 +64,6 @@ describe('ActionContext', () => { }); it('generates expected properties if comparator is between', async () => { - const base: BaseActionContext = { - date: '2020-01-01T00:00:00.000Z', - group: '[group]', - value: 4, - }; const params = ParamsSchema.validate({ index: '[index]', timeField: '[timeField]', @@ -81,6 +76,11 @@ describe('ActionContext', () => { thresholdComparator: 'between', threshold: [4, 5], }); + const base: BaseActionContext = { + date: '2020-01-01T00:00:00.000Z', + group: '[group]', + value: 4, + }; const context = addMessages({ name: '[alert-name]' }, base, params); expect(context.title).toMatchInlineSnapshot( `"alert [alert-name] group [group] exceeded threshold"` diff --git a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.test.ts b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.test.ts index d3583fd4cdb0b..e33a3e775ca96 100644 --- 
a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.test.ts +++ b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.test.ts @@ -47,6 +47,52 @@ describe('alertType', () => { "name": "value", }, ], + "params": Array [ + Object { + "description": "An array of values to use as the threshold; 'between' and 'notBetween' require two values, the others require one.", + "name": "threshold", + }, + Object { + "description": "A comparison function to use to determine if the threshold has been met.", + "name": "thresholdComparator", + }, + Object { + "description": "index", + "name": "index", + }, + Object { + "description": "timeField", + "name": "timeField", + }, + Object { + "description": "aggType", + "name": "aggType", + }, + Object { + "description": "aggField", + "name": "aggField", + }, + Object { + "description": "groupBy", + "name": "groupBy", + }, + Object { + "description": "termField", + "name": "termField", + }, + Object { + "description": "termSize", + "name": "termSize", + }, + Object { + "description": "timeWindowSize", + "name": "timeWindowSize", + }, + Object { + "description": "timeWindowUnit", + "name": "timeWindowUnit", + }, + ], } `); }); diff --git a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.ts b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.ts index 153334cb64047..c0522c08a7b96 100644 --- a/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.ts +++ b/x-pack/plugins/alerting_builtins/server/alert_types/index_threshold/alert_type.ts @@ -14,6 +14,7 @@ import { BUILT_IN_ALERTS_FEATURE_ID } from '../../../common'; export const ID = '.index-threshold'; +import { CoreQueryParamsSchemaProperties } from './lib/core_query_types'; const ActionGroupId = 'threshold met'; const ComparatorFns = getComparatorFns(); export const ComparatorFnNames = new Set(ComparatorFns.keys()); @@ -67,6 +68,30 @@ export function getAlertType(service: Service): AlertType { } ); + const actionVariableContextThresholdLabel = i18n.translate( + 'xpack.alertingBuiltins.indexThreshold.actionVariableContextThresholdLabel', + { + defaultMessage: + "An array of values to use as the threshold; 'between' and 'notBetween' require two values, the others require one.", + } + ); + + const actionVariableContextThresholdComparatorLabel = i18n.translate( + 'xpack.alertingBuiltins.indexThreshold.actionVariableContextThresholdComparatorLabel', + { + defaultMessage: 'A comparison function to use to determine if the threshold has been met.', + } + ); + + const alertParamsVariables = Object.keys(CoreQueryParamsSchemaProperties).map( + (propKey: string) => { + return { + name: propKey, + description: propKey, + }; + } + ); + return { id: ID, name: alertTypeName, @@ -83,6 +108,11 @@ export function getAlertType(service: Service): AlertType { { name: 'date', description: actionVariableContextDateLabel }, { name: 'value', description: actionVariableContextValueLabel }, ], + params: [ + { name: 'threshold', description: actionVariableContextThresholdLabel }, + { name: 'thresholdComparator', description: actionVariableContextThresholdComparatorLabel }, + ...alertParamsVariables, + ], }, executor, producer: BUILT_IN_ALERTS_FEATURE_ID, diff --git a/x-pack/plugins/alerts/server/alert_type_registry.test.ts b/x-pack/plugins/alerts/server/alert_type_registry.test.ts index c740390713715..229847bda1836 100644 --- a/x-pack/plugins/alerts/server/alert_type_registry.test.ts +++
b/x-pack/plugins/alerts/server/alert_type_registry.test.ts @@ -208,6 +208,7 @@ describe('get()', () => { ], "actionVariables": Object { "context": Array [], + "params": Array [], "state": Array [], }, "defaultActionGroupId": "default", @@ -261,6 +262,7 @@ describe('list()', () => { ], "actionVariables": Object { "context": Array [], + "params": Array [], "state": Array [], }, "defaultActionGroupId": "testActionGroup", diff --git a/x-pack/plugins/alerts/server/alert_type_registry.ts b/x-pack/plugins/alerts/server/alert_type_registry.ts index c466d0e96382c..19d3bf13bd66d 100644 --- a/x-pack/plugins/alerts/server/alert_type_registry.ts +++ b/x-pack/plugins/alerts/server/alert_type_registry.ts @@ -119,5 +119,6 @@ function normalizedActionVariables(actionVariables: AlertType['actionVariables'] return { context: actionVariables?.context ?? [], state: actionVariables?.state ?? [], + params: actionVariables?.params ?? [], }; } diff --git a/x-pack/plugins/alerts/server/task_runner/create_execution_handler.test.ts b/x-pack/plugins/alerts/server/task_runner/create_execution_handler.test.ts index 3ea40fe4c3086..677040d8174e3 100644 --- a/x-pack/plugins/alerts/server/task_runner/create_execution_handler.test.ts +++ b/x-pack/plugins/alerts/server/task_runner/create_execution_handler.test.ts @@ -50,6 +50,11 @@ const createExecutionHandlerParams = { }, ], request: {} as KibanaRequest, + alertParams: { + foo: true, + contextVal: 'My other {{context.value}} goes here', + stateVal: 'My other {{state.value}} goes here', + }, }; beforeEach(() => { diff --git a/x-pack/plugins/alerts/server/task_runner/create_execution_handler.ts b/x-pack/plugins/alerts/server/task_runner/create_execution_handler.ts index e1e1568d2f13c..c21d81779e5e0 100644 --- a/x-pack/plugins/alerts/server/task_runner/create_execution_handler.ts +++ b/x-pack/plugins/alerts/server/task_runner/create_execution_handler.ts @@ -5,7 +5,7 @@ */ import { map } from 'lodash'; -import { AlertAction, State, Context, AlertType } from '../types'; +import { AlertAction, State, Context, AlertType, AlertParams } from '../types'; import { Logger, KibanaRequest } from '../../../../../src/core/server'; import { transformActionParams } from './transform_action_params'; import { PluginStartContract as ActionsPluginStartContract } from '../../../actions/server'; @@ -24,6 +24,7 @@ interface CreateExecutionHandlerOptions { logger: Logger; eventLogger: IEventLogger; request: KibanaRequest; + alertParams: AlertParams; } interface ExecutionHandlerOptions { @@ -45,6 +46,7 @@ export function createExecutionHandler({ alertType, eventLogger, request, + alertParams, }: CreateExecutionHandlerOptions) { const alertTypeActionGroups = new Set(map(alertType.actionGroups, 'id')); return async ({ actionGroup, context, state, alertInstanceId }: ExecutionHandlerOptions) => { @@ -66,6 +68,7 @@ export function createExecutionHandler({ context, actionParams: action.params, state, + alertParams, }), }; }); diff --git a/x-pack/plugins/alerts/server/task_runner/task_runner.ts b/x-pack/plugins/alerts/server/task_runner/task_runner.ts index e4d04a005c986..04fea58f250a3 100644 --- a/x-pack/plugins/alerts/server/task_runner/task_runner.ts +++ b/x-pack/plugins/alerts/server/task_runner/task_runner.ts @@ -110,7 +110,8 @@ export class TaskRunner { tags: string[] | undefined, spaceId: string, apiKey: string | null, - actions: Alert['actions'] + actions: Alert['actions'], + alertParams: RawAlert['params'] ) { return createExecutionHandler({ alertId, @@ -124,6 +125,7 @@ export class TaskRunner { 
alertType: this.alertType, eventLogger: this.context.eventLogger, request: this.getFakeKibanaRequest(spaceId, apiKey), + alertParams, }); } @@ -261,7 +263,8 @@ export class TaskRunner { alert.tags, spaceId, apiKey, - alert.actions + alert.actions, + alert.params ); return this.executeAlertInstances(services, alert, validatedParams, executionHandler, spaceId); } diff --git a/x-pack/plugins/alerts/server/task_runner/transform_action_params.test.ts b/x-pack/plugins/alerts/server/task_runner/transform_action_params.test.ts index d5c310caf3fda..ddbef8e32e708 100644 --- a/x-pack/plugins/alerts/server/task_runner/transform_action_params.test.ts +++ b/x-pack/plugins/alerts/server/task_runner/transform_action_params.test.ts @@ -13,6 +13,7 @@ test('skips non string parameters', () => { empty1: null, empty2: undefined, date: '2019-02-12T21:01:22.479Z', + message: 'Value "{{params.foo}}" exists', }; const result = transformActionParams({ actionParams, @@ -23,6 +24,9 @@ test('skips non string parameters', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: { + foo: 'test', + }, }); expect(result).toMatchInlineSnapshot(` Object { @@ -30,6 +34,7 @@ test('skips non string parameters', () => { "date": "2019-02-12T21:01:22.479Z", "empty1": null, "empty2": undefined, + "message": "Value \\"test\\" exists", "number": 1, } `); @@ -49,6 +54,7 @@ test('missing parameters get emptied out', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -71,6 +77,7 @@ test('context parameters are passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -92,6 +99,7 @@ test('state parameters are passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -113,6 +121,7 @@ test('alertId is passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -134,6 +143,7 @@ test('alertName is passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -155,6 +165,7 @@ test('tags is passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -175,6 +186,7 @@ test('undefined tags is passed to templates', () => { alertName: 'alert-name', spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -196,6 +208,7 @@ test('empty tags is passed to templates', () => { tags: [], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -217,6 +230,7 @@ test('spaceId is passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -238,6 +252,7 @@ test('alertInstanceId is passed to templates', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -261,6 +276,7 @@ test('works recursively', () => { tags: ['tag-A', 'tag-B'], 
spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { @@ -286,6 +302,7 @@ test('works recursively with arrays', () => { tags: ['tag-A', 'tag-B'], spaceId: 'spaceId-A', alertInstanceId: '2', + alertParams: {}, }); expect(result).toMatchInlineSnapshot(` Object { diff --git a/x-pack/plugins/alerts/server/task_runner/transform_action_params.ts b/x-pack/plugins/alerts/server/task_runner/transform_action_params.ts index fa4a0e40ddee5..30f062eee3705 100644 --- a/x-pack/plugins/alerts/server/task_runner/transform_action_params.ts +++ b/x-pack/plugins/alerts/server/task_runner/transform_action_params.ts @@ -6,7 +6,7 @@ import Mustache from 'mustache'; import { isString, cloneDeepWith } from 'lodash'; -import { AlertActionParams, State, Context } from '../types'; +import { AlertActionParams, State, Context, AlertParams } from '../types'; interface TransformActionParamsOptions { alertId: string; @@ -17,6 +17,7 @@ interface TransformActionParamsOptions { actionParams: AlertActionParams; state: State; context: Context; + alertParams: AlertParams; } export function transformActionParams({ @@ -28,6 +29,7 @@ export function transformActionParams({ context, actionParams, state, + alertParams, }: TransformActionParamsOptions): AlertActionParams { const result = cloneDeepWith(actionParams, (value: unknown) => { if (!isString(value)) return; @@ -43,6 +45,7 @@ export function transformActionParams({ alertInstanceId, context, state, + params: alertParams, }; return Mustache.render(value, variables); }); diff --git a/x-pack/plugins/alerts/server/types.ts b/x-pack/plugins/alerts/server/types.ts index 66eec370f2c20..154a9564518e8 100644 --- a/x-pack/plugins/alerts/server/types.ts +++ b/x-pack/plugins/alerts/server/types.ts @@ -23,6 +23,8 @@ import { export type State = Record; // eslint-disable-next-line @typescript-eslint/no-explicit-any export type Context = Record; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type AlertParams = Record; export type WithoutQueryAndParams = Pick>; export type GetServicesFunction = (request: KibanaRequest) => Services; export type GetBasePathFunction = (spaceId?: string) => string; @@ -82,6 +84,7 @@ export interface AlertType { actionVariables?: { context?: ActionVariable[]; state?: ActionVariable[]; + params?: ActionVariable[]; }; } diff --git a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx index d633d466b6614..56c420878cdba 100644 --- a/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx +++ b/x-pack/plugins/apm/public/components/app/Settings/CustomizeUI/CustomLink/index.test.tsx @@ -35,9 +35,8 @@ const data = [ ]; describe('CustomLink', () => { - let callApmApiSpy: jest.SpyInstance; beforeAll(() => { - callApmApiSpy = jest.spyOn(apmApi, 'callApmApi').mockReturnValue({}); + jest.spyOn(apmApi, 'callApmApi').mockReturnValue({}); }); afterAll(() => { jest.resetAllMocks(); @@ -103,7 +102,7 @@ describe('CustomLink', () => { ]); }); - it('checks if create custom link button is available and working', async () => { + it('checks if create custom link button is available and working', () => { const { queryByText, getByText } = render( @@ -115,7 +114,6 @@ describe('CustomLink', () => { act(() => { fireEvent.click(getByText('Create custom link')); }); - await wait(() => expect(callApmApiSpy).toHaveBeenCalled()); expect(queryByText('Create 
link')).toBeInTheDocument(); }); }); @@ -133,7 +131,7 @@ describe('CustomLink', () => { }); }); - const openFlyout = async () => { + const openFlyout = () => { const component = render( @@ -145,15 +143,12 @@ describe('CustomLink', () => { act(() => { fireEvent.click(component.getByText('Create custom link')); }); - await wait(() => - expect(component.queryByText('Create link')).toBeInTheDocument() - ); - await wait(() => expect(callApmApiSpy).toHaveBeenCalled()); + expect(component.queryByText('Create link')).toBeInTheDocument(); return component; }; it('creates a custom link', async () => { - const component = await openFlyout(); + const component = openFlyout(); const labelInput = component.getByTestId('label'); act(() => { fireEvent.change(labelInput, { @@ -167,7 +162,7 @@ describe('CustomLink', () => { }); }); await act(async () => { - await wait(() => fireEvent.submit(component.getByText('Save'))); + fireEvent.submit(component.getByText('Save')); }); expect(saveCustomLinkSpy).toHaveBeenCalledTimes(1); }); @@ -186,11 +181,12 @@ describe('CustomLink', () => { act(() => { fireEvent.click(editButtons[0]); }); - expect(component.queryByText('Create link')).toBeInTheDocument(); + await wait(() => + expect(component.queryByText('Create link')).toBeInTheDocument() + ); await act(async () => { - await wait(() => fireEvent.click(component.getByText('Delete'))); + fireEvent.click(component.getByText('Delete')); }); - expect(callApmApiSpy).toHaveBeenCalled(); expect(refetch).toHaveBeenCalled(); }); @@ -200,8 +196,8 @@ describe('CustomLink', () => { fireEvent.click(component.getByText('Add another filter')); } }; - it('checks if add filter button is disabled after all elements have been added', async () => { - const component = await openFlyout(); + it('checks if add filter button is disabled after all elements have been added', () => { + const component = openFlyout(); expect(component.getAllByText('service.name').length).toEqual(1); addFilterField(component, 1); expect(component.getAllByText('service.name').length).toEqual(2); @@ -211,8 +207,8 @@ describe('CustomLink', () => { addFilterField(component, 2); expect(component.getAllByText('service.name').length).toEqual(4); }); - it('removes items already selected', async () => { - const component = await openFlyout(); + it('removes items already selected', () => { + const component = openFlyout(); const addFieldAndCheck = ( fieldName: string, diff --git a/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts b/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts index a44fad82f20e6..10651d97f3c3d 100644 --- a/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts +++ b/x-pack/plugins/apm/scripts/upload-telemetry-data/index.ts @@ -87,13 +87,15 @@ async function uploadData() { return client.search(body as any).then((res) => res.body); }, indicesStats: (body) => { - return client.indices.stats(body as any); + return client.indices.stats(body as any).then((res) => res.body); }, transportRequest: ((params) => { - return client.transport.request({ - method: params.method, - path: params.path, - }); + return client.transport + .request({ + method: params.method, + path: params.path, + }) + .then((res) => res.body); }) as CollectTelemetryParams['transportRequest'], }, }); diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts index 2836cf100a432..6f4f92c6833f7 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts +++ 
b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts @@ -5,8 +5,12 @@ */ import { Observable } from 'rxjs'; import { take } from 'rxjs/operators'; -import { CoreSetup, Logger } from 'src/core/server'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { + CoreSetup, + Logger, + SavedObjectsErrorHelpers, +} from '../../../../../../src/core/server'; import { APMConfig } from '../..'; import { TaskManagerSetupContract, @@ -110,7 +114,7 @@ export async function createApmTelemetry({ return data; } catch (err) { - if (err.output?.statusCode === 404) { + if (SavedObjectsErrorHelpers.isNotFoundError(err)) { // task has not run yet, so no saved object to return return {}; } diff --git a/x-pack/plugins/canvas/public/application.tsx b/x-pack/plugins/canvas/public/application.tsx index b2c836fe4805f..0bbf449ce11f9 100644 --- a/x-pack/plugins/canvas/public/application.tsx +++ b/x-pack/plugins/canvas/public/application.tsx @@ -31,7 +31,7 @@ import { init as initStatsReporter } from './lib/ui_metric'; import { CapabilitiesStrings } from '../i18n'; -import { startServices, services } from './services'; +import { startServices, services, ServicesProvider } from './services'; // @ts-expect-error untyped local import { createHistory, destroyHistory } from './lib/history_provider'; // @ts-expect-error untyped local @@ -52,19 +52,16 @@ export const renderApp = ( ) => { element.classList.add('canvas'); element.classList.add('canvasContainerWrapper'); - const canvasServices = Object.entries(services).reduce((reduction, [key, provider]) => { - reduction[key] = provider.getService(); - - return reduction; - }, {} as Record); ReactDOM.render( - - - - - - + + + + + + + + , element ); diff --git a/x-pack/plugins/canvas/public/components/app/index.js b/x-pack/plugins/canvas/public/components/app/index.js index a1e3b9c09554a..9a6e8719e7f40 100644 --- a/x-pack/plugins/canvas/public/components/app/index.js +++ b/x-pack/plugins/canvas/public/components/app/index.js @@ -8,7 +8,7 @@ import { connect } from 'react-redux'; import { compose, withProps } from 'recompose'; import { getAppReady, getBasePath } from '../../state/selectors/app'; import { appReady, appError } from '../../state/actions/app'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; +import { withServices } from '../../services'; import { App as Component } from './app'; @@ -45,8 +45,8 @@ const mergeProps = (stateProps, dispatchProps, ownProps) => { export const App = compose( connect(mapStateToProps, mapDispatchToProps, mergeProps), - withKibana, + withServices, withProps((props) => ({ - onRouteChange: props.kibana.services.canvas.navLink.updatePath, + onRouteChange: props.services.navLink.updatePath, })) )(Component); diff --git a/x-pack/plugins/canvas/public/components/app/track_route_change.js b/x-pack/plugins/canvas/public/components/app/track_route_change.js deleted file mode 100644 index 2886aa868eb9e..0000000000000 --- a/x-pack/plugins/canvas/public/components/app/track_route_change.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { get } from 'lodash'; -import { getWindow } from '../../lib/get_window'; -import { CANVAS_APP } from '../../../common/lib/constants'; -import { platformService } from '../../services'; - -export function trackRouteChange() { - const basePath = platformService.getService().coreStart.http.basePath.get(); - - platformService - .getService() - .startPlugins.__LEGACY.trackSubUrlForApp( - CANVAS_APP, - platformService - .getService() - .startPlugins.__LEGACY.absoluteToParsedUrl(get(getWindow(), 'location.href'), basePath) - ); -} diff --git a/x-pack/plugins/canvas/public/components/element_content/index.js b/x-pack/plugins/canvas/public/components/element_content/index.js index a138c3acb8ec7..63ece6ac32812 100644 --- a/x-pack/plugins/canvas/public/components/element_content/index.js +++ b/x-pack/plugins/canvas/public/components/element_content/index.js @@ -8,8 +8,8 @@ import PropTypes from 'prop-types'; import { connect } from 'react-redux'; import { compose, withProps } from 'recompose'; import { get } from 'lodash'; +import { withServices } from '../../services'; import { getSelectedPage, getPageById } from '../../state/selectors/workpad'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; import { ElementContent as Component } from './element_content'; const mapStateToProps = (state) => ({ @@ -18,9 +18,9 @@ const mapStateToProps = (state) => ({ export const ElementContent = compose( connect(mapStateToProps), - withKibana, - withProps(({ renderable, kibana }) => ({ - renderFunction: kibana.services.expressions.getRenderer(get(renderable, 'as')), + withServices, + withProps(({ renderable, services }) => ({ + renderFunction: services.expressions.getRenderer(get(renderable, 'as')), })) )(Component); diff --git a/x-pack/plugins/canvas/public/components/embeddable_flyout/flyout.tsx b/x-pack/plugins/canvas/public/components/embeddable_flyout/flyout.tsx index df9dad3e7f678..0b5bd8adf8cb9 100644 --- a/x-pack/plugins/canvas/public/components/embeddable_flyout/flyout.tsx +++ b/x-pack/plugins/canvas/public/components/embeddable_flyout/flyout.tsx @@ -4,15 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React from 'react'; +import React, { FC } from 'react'; import { EuiFlyout, EuiFlyoutHeader, EuiFlyoutBody, EuiTitle } from '@elastic/eui'; import { SavedObjectFinderUi, SavedObjectMetaData, } from '../../../../../../src/plugins/saved_objects/public/'; import { ComponentStrings } from '../../../i18n'; -import { CoreStart } from '../../../../../../src/core/public'; -import { CanvasStartDeps } from '../../plugin'; +import { useServices } from '../../services'; const { AddEmbeddableFlyout: strings } = ComponentStrings; @@ -20,14 +19,16 @@ export interface Props { onClose: () => void; onSelect: (id: string, embeddableType: string) => void; availableEmbeddables: string[]; - savedObjects: CoreStart['savedObjects']; - uiSettings: CoreStart['uiSettings']; - getEmbeddableFactories: CanvasStartDeps['embeddable']['getEmbeddableFactories']; } -export class AddEmbeddableFlyout extends React.Component { - onAddPanel = (id: string, savedObjectType: string, name: string) => { - const embeddableFactories = this.props.getEmbeddableFactories(); +export const AddEmbeddableFlyout: FC = ({ onSelect, availableEmbeddables, onClose }) => { + const services = useServices(); + const { embeddables, platform } = services; + const { getEmbeddableFactories } = embeddables; + const { getSavedObjects, getUISettings } = platform; + + const onAddPanel = (id: string, savedObjectType: string, name: string) => { + const embeddableFactories = getEmbeddableFactories(); // Find the embeddable type from the saved object type const found = Array.from(embeddableFactories).find((embeddableFactory) => { @@ -39,41 +40,39 @@ export class AddEmbeddableFlyout extends React.Component { const foundEmbeddableType = found ? found.type : 'unknown'; - this.props.onSelect(id, foundEmbeddableType); + onSelect(id, foundEmbeddableType); }; - render() { - const embeddableFactories = this.props.getEmbeddableFactories(); + const embeddableFactories = getEmbeddableFactories(); - const availableSavedObjects = Array.from(embeddableFactories) - .filter((factory) => { - return this.props.availableEmbeddables.includes(factory.type); - }) - .map((factory) => factory.savedObjectMetaData) - .filter>(function ( - maybeSavedObjectMetaData - ): maybeSavedObjectMetaData is SavedObjectMetaData<{}> { - return maybeSavedObjectMetaData !== undefined; - }); + const availableSavedObjects = Array.from(embeddableFactories) + .filter((factory) => { + return availableEmbeddables.includes(factory.type); + }) + .map((factory) => factory.savedObjectMetaData) + .filter>(function ( + maybeSavedObjectMetaData + ): maybeSavedObjectMetaData is SavedObjectMetaData<{}> { + return maybeSavedObjectMetaData !== undefined; + }); - return ( - - - -

-          <h2>{strings.getTitleText()}</h2>
-        </EuiTitle>
-      </EuiFlyoutHeader>
-      <EuiFlyoutBody>
-        <SavedObjectFinderUi
-          onChoose={this.onAddPanel}
-          savedObjectMetaData={availableSavedObjects}
-          savedObjects={this.props.savedObjects}
-          uiSettings={this.props.uiSettings}
-        />
-      </EuiFlyoutBody>
-    </EuiFlyout>
-    );
-  }
-}
+  return (
+    <EuiFlyout onClose={onClose}>
+      <EuiFlyoutHeader>
+        <EuiTitle>
+          <h2>{strings.getTitleText()}</h2>
+        </EuiTitle>
+      </EuiFlyoutHeader>
+      <EuiFlyoutBody>
+        <SavedObjectFinderUi
+          onChoose={onAddPanel}
+          savedObjectMetaData={availableSavedObjects}
+          savedObjects={getSavedObjects()}
+          uiSettings={getUISettings()}
+        />
+      </EuiFlyoutBody>
+    </EuiFlyout>
+ ); +}; diff --git a/x-pack/plugins/canvas/public/components/embeddable_flyout/index.tsx b/x-pack/plugins/canvas/public/components/embeddable_flyout/index.tsx index 9462ba0411de4..62a073daf4c59 100644 --- a/x-pack/plugins/canvas/public/components/embeddable_flyout/index.tsx +++ b/x-pack/plugins/canvas/public/components/embeddable_flyout/index.tsx @@ -14,8 +14,6 @@ import { AddEmbeddableFlyout, Props } from './flyout'; import { addElement } from '../../state/actions/elements'; import { getSelectedPage } from '../../state/selectors/workpad'; import { EmbeddableTypes } from '../../../canvas_plugin_src/expression_types/embeddable'; -import { WithKibanaProps } from '../../index'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; const allowedEmbeddables = { [EmbeddableTypes.map]: (id: string) => { @@ -74,10 +72,10 @@ const mergeProps = ( }; }; -export class EmbeddableFlyoutPortal extends React.Component { +export class EmbeddableFlyoutPortal extends React.Component { el?: HTMLElement; - constructor(props: Props & WithKibanaProps) { + constructor(props: Props) { super(props); this.el = document.createElement('div'); @@ -103,9 +101,6 @@ export class EmbeddableFlyoutPortal extends React.Component, this.el ); @@ -113,7 +108,6 @@ export class EmbeddableFlyoutPortal extends React.Component void }>( - connect(mapStateToProps, mapDispatchToProps, mergeProps), - withKibana +export const AddEmbeddablePanel = compose void }>( + connect(mapStateToProps, mapDispatchToProps, mergeProps) )(EmbeddableFlyoutPortal); diff --git a/x-pack/plugins/canvas/public/components/expression/index.js b/x-pack/plugins/canvas/public/components/expression/index.js index 4480169dd037d..146acbcd6c6ee 100644 --- a/x-pack/plugins/canvas/public/components/expression/index.js +++ b/x-pack/plugins/canvas/public/components/expression/index.js @@ -15,7 +15,7 @@ import { renderComponent, } from 'recompose'; import { fromExpression } from '@kbn/interpreter/common'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; +import { withServices } from '../../services'; import { getSelectedPage, getSelectedElement } from '../../state/selectors/workpad'; import { setExpression, flushContext } from '../../state/actions/elements'; import { ElementNotSelected } from './element_not_selected'; @@ -46,7 +46,7 @@ const mergeProps = (stateProps, dispatchProps, ownProps) => { const { expression } = element; - const functions = Object.values(allProps.kibana.services.expressions.getFunctions()); + const functions = Object.values(allProps.services.expressions.getFunctions()); return { ...allProps, @@ -71,7 +71,7 @@ const expressionLifecycle = lifecycle({ }); export const Expression = compose( - withKibana, + withServices, connect(mapStateToProps, mapDispatchToProps, mergeProps), withState('formState', 'setFormState', ({ expression }) => ({ expression, diff --git a/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx b/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx index bc51128cf0c87..7939c1d04631a 100644 --- a/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx +++ b/x-pack/plugins/canvas/public/components/render_with_fn/render_with_fn.tsx @@ -7,7 +7,7 @@ import React, { useState, useEffect, useRef, FC, useCallback } from 'react'; import { useDebounce } from 'react-use'; -import { useKibana } from '../../../../../../src/plugins/kibana_react/public'; +import { useNotifyService } from '../../services'; import { RenderToDom } from 
'../render_to_dom'; import { ErrorStrings } from '../../../i18n'; import { RendererHandlers } from '../../../types'; @@ -39,8 +39,7 @@ export const RenderWithFn: FC = ({ width, height, }) => { - const { services } = useKibana(); - const onError = services.canvas.notify.error; + const { error: onError } = useNotifyService(); const [domNode, setDomNode] = useState(null); diff --git a/x-pack/plugins/canvas/public/components/saved_elements_modal/index.ts b/x-pack/plugins/canvas/public/components/saved_elements_modal/index.ts index c5c1dbc2fdd6e..da2955c146193 100644 --- a/x-pack/plugins/canvas/public/components/saved_elements_modal/index.ts +++ b/x-pack/plugins/canvas/public/components/saved_elements_modal/index.ts @@ -10,8 +10,7 @@ import { compose, withState } from 'recompose'; import { camelCase } from 'lodash'; import { cloneSubgraphs } from '../../lib/clone_subgraphs'; import * as customElementService from '../../lib/custom_element_service'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; -import { WithKibanaProps } from '../../'; +import { withServices, WithServicesProps } from '../../services'; // @ts-expect-error untyped local import { selectToplevelNodes } from '../../state/actions/transient'; // @ts-expect-error untyped local @@ -63,7 +62,7 @@ const mapDispatchToProps = (dispatch: Dispatch): DispatchProps => ({ const mergeProps = ( stateProps: StateProps, dispatchProps: DispatchProps, - ownProps: OwnPropsWithState & WithKibanaProps + ownProps: OwnPropsWithState & WithServicesProps ): ComponentProps => { const { pageId } = stateProps; const { onClose, search, setCustomElements } = ownProps; @@ -91,7 +90,7 @@ const mergeProps = ( try { await findCustomElements(); } catch (err) { - ownProps.kibana.services.canvas.notify.error(err, { + ownProps.services.notify.error(err, { title: `Couldn't find custom elements`, }); } @@ -102,7 +101,7 @@ const mergeProps = ( await customElementService.remove(id); await findCustomElements(); } catch (err) { - ownProps.kibana.services.canvas.notify.error(err, { + ownProps.services.notify.error(err, { title: `Couldn't delete custom elements`, }); } @@ -118,7 +117,7 @@ const mergeProps = ( }); await findCustomElements(); } catch (err) { - ownProps.kibana.services.canvas.notify.error(err, { + ownProps.services.notify.error(err, { title: `Couldn't update custom elements`, }); } @@ -127,7 +126,7 @@ const mergeProps = ( }; export const SavedElementsModal = compose( - withKibana, + withServices, withState('search', 'setSearch', ''), withState('customElements', 'setCustomElements', []), connect(mapStateToProps, mapDispatchToProps, mergeProps) diff --git a/x-pack/plugins/canvas/public/components/var_config/index.tsx b/x-pack/plugins/canvas/public/components/var_config/index.tsx index 526037b79e0e0..ca40bd07877f0 100644 --- a/x-pack/plugins/canvas/public/components/var_config/index.tsx +++ b/x-pack/plugins/canvas/public/components/var_config/index.tsx @@ -7,27 +7,19 @@ import React, { FC } from 'react'; import copy from 'copy-to-clipboard'; import { VarConfig as ChildComponent } from './var_config'; -import { - withKibana, - KibanaReactContextValue, - KibanaServices, -} from '../../../../../../src/plugins/kibana_react/public'; -import { CanvasServices } from '../../services'; - +import { useNotifyService } from '../../services'; import { ComponentStrings } from '../../../i18n'; - import { CanvasVariable } from '../../../types'; const { VarConfig: strings } = ComponentStrings; interface Props { - kibana: KibanaReactContextValue<{ 
canvas: CanvasServices } & KibanaServices>; - variables: CanvasVariable[]; setVariables: (variables: CanvasVariable[]) => void; } -const WrappedComponent: FC = ({ kibana, variables, setVariables }) => { +export const VarConfig: FC = ({ variables, setVariables }) => { + const { success } = useNotifyService(); const onDeleteVar = (v: CanvasVariable) => { const index = variables.findIndex((targetVar: CanvasVariable) => { return targetVar.name === v.name; @@ -36,15 +28,14 @@ const WrappedComponent: FC = ({ kibana, variables, setVariables }) => { const newVars = [...variables]; newVars.splice(index, 1); setVariables(newVars); - - kibana.services.canvas.notify.success(strings.getDeleteNotificationDescription()); + success(strings.getDeleteNotificationDescription()); } }; const onCopyVar = (v: CanvasVariable) => { const snippetStr = `{var "${v.name}"}`; copy(snippetStr, { debug: true }); - kibana.services.canvas.notify.success(strings.getCopyNotificationDescription()); + success(strings.getCopyNotificationDescription()); }; const onAddVar = (v: CanvasVariable) => { @@ -62,5 +53,3 @@ const WrappedComponent: FC = ({ kibana, variables, setVariables }) => { return ; }; - -export const VarConfig = withKibana(WrappedComponent); diff --git a/x-pack/plugins/canvas/public/components/workpad_header/element_menu/index.tsx b/x-pack/plugins/canvas/public/components/workpad_header/element_menu/index.tsx index 13b2cace13a40..264873fc994dd 100644 --- a/x-pack/plugins/canvas/public/components/workpad_header/element_menu/index.tsx +++ b/x-pack/plugins/canvas/public/components/workpad_header/element_menu/index.tsx @@ -8,7 +8,6 @@ import React from 'react'; import { connect } from 'react-redux'; import { compose, withProps } from 'recompose'; import { Dispatch } from 'redux'; -import { withKibana } from '../../../../../../../src/plugins/kibana_react/public/'; import { State, ElementSpec } from '../../../../types'; // @ts-expect-error untyped local import { elementsRegistry } from '../../../lib/elements_registry'; @@ -44,6 +43,5 @@ const mergeProps = (stateProps: StateProps, dispatchProps: DispatchProps) => ({ export const ElementMenu = compose( connect(mapStateToProps, mapDispatchToProps, mergeProps), - withKibana, withProps(() => ({ elements: elementsRegistry.toJS() })) )(Component); diff --git a/x-pack/plugins/canvas/public/components/workpad_header/share_menu/index.ts b/x-pack/plugins/canvas/public/components/workpad_header/share_menu/index.ts index 17fcc50334a8f..01bcfebc0dba9 100644 --- a/x-pack/plugins/canvas/public/components/workpad_header/share_menu/index.ts +++ b/x-pack/plugins/canvas/public/components/workpad_header/share_menu/index.ts @@ -13,8 +13,7 @@ import { downloadWorkpad } from '../../../lib/download_workpad'; import { ShareMenu as Component, Props as ComponentProps } from './share_menu'; import { getPdfUrl, createPdf } from './utils'; import { State, CanvasWorkpad } from '../../../../types'; -import { withKibana } from '../../../../../../../src/plugins/kibana_react/public/'; -import { WithKibanaProps } from '../../../index'; +import { withServices, WithServicesProps } from '../../../services'; import { ComponentStrings } from '../../../../i18n'; @@ -43,12 +42,16 @@ interface Props { export const ShareMenu = compose( connect(mapStateToProps), - withKibana, + withServices, withProps( - ({ workpad, pageCount, kibana }: Props & WithKibanaProps): ComponentProps => ({ + ({ workpad, pageCount, services }: Props & WithServicesProps): ComponentProps => ({ getExportUrl: (type) => { if (type === 'pdf') { - 
const pdfUrl = getPdfUrl(workpad, { pageCount }, kibana.services.http.basePath); + const pdfUrl = getPdfUrl( + workpad, + { pageCount }, + services.platform.getBasePathInterface() + ); return getAbsoluteUrl(pdfUrl); } @@ -57,10 +60,10 @@ export const ShareMenu = compose( onCopy: (type) => { switch (type) { case 'pdf': - kibana.services.canvas.notify.info(strings.getCopyPDFMessage()); + services.notify.info(strings.getCopyPDFMessage()); break; case 'reportingConfig': - kibana.services.canvas.notify.info(strings.getCopyReportingConfigMessage()); + services.notify.info(strings.getCopyReportingConfigMessage()); break; default: throw new Error(strings.getUnknownExportErrorMessage(type)); @@ -69,9 +72,9 @@ export const ShareMenu = compose( onExport: (type) => { switch (type) { case 'pdf': - return createPdf(workpad, { pageCount }, kibana.services.http.basePath) + return createPdf(workpad, { pageCount }, services.platform.getBasePathInterface()) .then(({ data }: { data: { job: { id: string } } }) => { - kibana.services.canvas.notify.info(strings.getExportPDFMessage(), { + services.notify.info(strings.getExportPDFMessage(), { title: strings.getExportPDFTitle(workpad.name), }); @@ -79,7 +82,7 @@ export const ShareMenu = compose( jobCompletionNotifications.add(data.job.id); }) .catch((err: Error) => { - kibana.services.canvas.notify.error(err, { + services.notify.error(err, { title: strings.getExportPDFErrorTitle(workpad.name), }); }); diff --git a/x-pack/plugins/canvas/public/components/workpad_header/view_menu/index.ts b/x-pack/plugins/canvas/public/components/workpad_header/view_menu/index.ts index ddf1a12775cae..e2a05d13b017e 100644 --- a/x-pack/plugins/canvas/public/components/workpad_header/view_menu/index.ts +++ b/x-pack/plugins/canvas/public/components/workpad_header/view_menu/index.ts @@ -7,7 +7,6 @@ import { connect } from 'react-redux'; import { compose, withHandlers } from 'recompose'; import { Dispatch } from 'redux'; -import { withKibana } from '../../../../../../../src/plugins/kibana_react/public/'; import { zoomHandlerCreators } from '../../../lib/app_handler_creators'; import { State, CanvasWorkpadBoundingBox } from '../../../../types'; // @ts-expect-error untyped local @@ -97,6 +96,5 @@ const mergeProps = ( export const ViewMenu = compose( connect(mapStateToProps, mapDispatchToProps, mergeProps), - withKibana, withHandlers(zoomHandlerCreators) )(Component); diff --git a/x-pack/plugins/canvas/public/components/workpad_loader/index.js b/x-pack/plugins/canvas/public/components/workpad_loader/index.js index ab07d5d722405..f747cb677a576 100644 --- a/x-pack/plugins/canvas/public/components/workpad_loader/index.js +++ b/x-pack/plugins/canvas/public/components/workpad_loader/index.js @@ -14,7 +14,7 @@ import { getWorkpad } from '../../state/selectors/workpad'; import { getId } from '../../lib/get_id'; import { downloadWorkpad } from '../../lib/download_workpad'; import { ComponentStrings, ErrorStrings } from '../../../i18n'; -import { withKibana } from '../../../../../../src/plugins/kibana_react/public'; +import { withServices } from '../../services'; import { WorkpadLoader as Component } from './workpad_loader'; const { WorkpadLoader: strings } = ComponentStrings; @@ -31,11 +31,11 @@ export const WorkpadLoader = compose( }), connect(mapStateToProps), withState('workpads', 'setWorkpads', null), - withKibana, - withProps(({ kibana }) => ({ - notify: kibana.services.canvas.notify, + withServices, + withProps(({ services }) => ({ + notify: services.notify, })), - withHandlers(({ kibana }) => 
({ + withHandlers(({ services }) => ({ // Workpad creation via navigation createWorkpad: (props) => async (workpad) => { // workpad data uploaded, create and load it @@ -44,7 +44,7 @@ export const WorkpadLoader = compose( await workpadService.create(workpad); props.router.navigateTo('loadWorkpad', { id: workpad.id, page: 1 }); } catch (err) { - kibana.services.canvas.notify.error(err, { + services.notify.error(err, { title: errors.getUploadFailureErrorMessage(), }); } @@ -60,7 +60,7 @@ export const WorkpadLoader = compose( const workpads = await workpadService.find(text); setWorkpads(workpads); } catch (err) { - kibana.services.canvas.notify.error(err, { title: errors.getFindFailureErrorMessage() }); + services.notify.error(err, { title: errors.getFindFailureErrorMessage() }); } }, @@ -76,7 +76,7 @@ export const WorkpadLoader = compose( await workpadService.create(workpad); props.router.navigateTo('loadWorkpad', { id: workpad.id, page: 1 }); } catch (err) { - kibana.services.canvas.notify.error(err, { title: errors.getCloneFailureErrorMessage() }); + services.notify.error(err, { title: errors.getCloneFailureErrorMessage() }); } }, @@ -122,7 +122,7 @@ export const WorkpadLoader = compose( }; if (errored.length > 0) { - kibana.services.canvas.notify.error(errors.getDeleteFailureErrorMessage()); + services.notify.error(errors.getDeleteFailureErrorMessage()); } setWorkpads(workpadState); @@ -137,7 +137,7 @@ export const WorkpadLoader = compose( })), withProps((props) => ({ formatDate: (date) => { - const dateFormat = props.kibana.services.uiSettings.get('dateFormat'); + const dateFormat = props.services.platform.getUISetting('dateFormat'); return date && moment(date).format(dateFormat); }, })) diff --git a/x-pack/plugins/canvas/public/components/workpad_templates/index.tsx b/x-pack/plugins/canvas/public/components/workpad_templates/index.tsx index f35bba3fd598d..35b0e2bb19e3e 100644 --- a/x-pack/plugins/canvas/public/components/workpad_templates/index.tsx +++ b/x-pack/plugins/canvas/public/components/workpad_templates/index.tsx @@ -10,12 +10,11 @@ import { RouterContext } from '../router'; import { ComponentStrings } from '../../../i18n/components'; // @ts-expect-error import * as workpadService from '../../lib/workpad_service'; -import { useKibana } from '../../../../../../src/plugins/kibana_react/public'; import { WorkpadTemplates as Component } from './workpad_templates'; import { CanvasTemplate } from '../../../types'; -import { UseKibanaProps } from '../../'; import { list } from '../../lib/template_service'; import { applyTemplateStrings } from '../../../i18n/templates/apply_strings'; +import { useNotifyService } from '../../services'; interface WorkpadTemplatesProps { onClose: () => void; @@ -33,7 +32,7 @@ export const WorkpadTemplates: FunctionComponent = ({ onC const [creatingFromTemplateName, setCreatingFromTemplateName] = useState( undefined ); - const kibana = useKibana(); + const { error } = useNotifyService(); useEffect(() => { if (!templates) { @@ -60,9 +59,9 @@ export const WorkpadTemplates: FunctionComponent = ({ onC if (router) { router.navigateTo('loadWorkpad', { id: result.data.id, page: 1 }); } - } catch (error) { + } catch (e) { setCreatingFromTemplateName(undefined); - kibana.services.canvas.notify.error(error, { + error(e, { title: `Couldn't create workpad from template`, }); } diff --git a/x-pack/plugins/canvas/public/lib/breadcrumbs.ts b/x-pack/plugins/canvas/public/lib/breadcrumbs.ts index 96412ef50c79d..b613bb7fcdaf1 100644 --- 
a/x-pack/plugins/canvas/public/lib/breadcrumbs.ts +++ b/x-pack/plugins/canvas/public/lib/breadcrumbs.ts @@ -24,6 +24,5 @@ export const getWorkpadBreadcrumb = ({ }; export const setBreadcrumb = (paths: ChromeBreadcrumb | ChromeBreadcrumb[]) => { - const setBreadCrumbs = platformService.getService().coreStart.chrome.setBreadcrumbs; - setBreadCrumbs(Array.isArray(paths) ? paths : [paths]); + platformService.getService().setBreadcrumbs(Array.isArray(paths) ? paths : [paths]); }; diff --git a/x-pack/plugins/canvas/public/lib/custom_element_service.ts b/x-pack/plugins/canvas/public/lib/custom_element_service.ts index 25c3b78a2746e..f240df93d0387 100644 --- a/x-pack/plugins/canvas/public/lib/custom_element_service.ts +++ b/x-pack/plugins/canvas/public/lib/custom_element_service.ts @@ -11,7 +11,7 @@ import { CustomElement } from '../../types'; import { platformService } from '../services'; const getApiPath = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return `${basePath}${API_ROUTE_CUSTOM_ELEMENT}`; }; diff --git a/x-pack/plugins/canvas/public/lib/documentation_links.ts b/x-pack/plugins/canvas/public/lib/documentation_links.ts index 6430f7d87d4f7..cb19389291028 100644 --- a/x-pack/plugins/canvas/public/lib/documentation_links.ts +++ b/x-pack/plugins/canvas/public/lib/documentation_links.ts @@ -7,10 +7,14 @@ import { platformService } from '../services'; export const getDocumentationLinks = () => ({ - canvas: `${platformService.getService().coreStart.docLinks.ELASTIC_WEBSITE_URL}guide/en/kibana/${ - platformService.getService().coreStart.docLinks.DOC_LINK_VERSION - }/canvas.html`, - numeral: `${platformService.getService().coreStart.docLinks.ELASTIC_WEBSITE_URL}guide/en/kibana/${ - platformService.getService().coreStart.docLinks.DOC_LINK_VERSION - }/guide/numeral.html`, + canvas: `${platformService + .getService() + .getElasticWebsiteUrl()}guide/en/kibana/${platformService + .getService() + .getDocLinkVersion()}/canvas.html`, + numeral: `${platformService + .getService() + .getElasticWebsiteUrl()}guide/en/kibana/${platformService + .getService() + .getDocLinkVersion()}/guide/numeral.html`, }); diff --git a/x-pack/plugins/canvas/public/lib/es_service.ts b/x-pack/plugins/canvas/public/lib/es_service.ts index 5c1131d5fbe35..fee66c71636c8 100644 --- a/x-pack/plugins/canvas/public/lib/es_service.ts +++ b/x-pack/plugins/canvas/public/lib/es_service.ts @@ -15,16 +15,16 @@ import { platformService } from '../services'; const { esService: strings } = ErrorStrings; const getApiPath = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return basePath + API_ROUTE; }; const getSavedObjectsClient = function () { - return platformService.getService().coreStart.savedObjects.client; + return platformService.getService().getSavedObjectsClient(); }; const getAdvancedSettings = function () { - return platformService.getService().coreStart.uiSettings; + return platformService.getService().getUISettings(); }; export const getFields = (index = '_all') => { diff --git a/x-pack/plugins/canvas/public/lib/template_service.ts b/x-pack/plugins/canvas/public/lib/template_service.ts index 98d582c854e36..185b2ec37ba95 100644 --- a/x-pack/plugins/canvas/public/lib/template_service.ts +++ b/x-pack/plugins/canvas/public/lib/template_service.ts @@ -10,7 +10,7 @@ import { platformService } from '../services'; import { 
CanvasTemplate } from '../../types'; const getApiPath = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return `${basePath}${API_ROUTE_TEMPLATES}`; }; diff --git a/x-pack/plugins/canvas/public/lib/workpad_service.js b/x-pack/plugins/canvas/public/lib/workpad_service.js index 2047e20424acc..27efe25405fd7 100644 --- a/x-pack/plugins/canvas/public/lib/workpad_service.js +++ b/x-pack/plugins/canvas/public/lib/workpad_service.js @@ -12,6 +12,7 @@ import { } from '../../common/lib/constants'; import { fetch } from '../../common/lib/fetch'; import { platformService } from '../services'; + /* Remove any top level keys from the workpad which will be rejected by validation */ @@ -44,17 +45,17 @@ const sanitizeWorkpad = function (workpad) { }; const getApiPath = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return `${basePath}${API_ROUTE_WORKPAD}`; }; const getApiPathStructures = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return `${basePath}${API_ROUTE_WORKPAD_STRUCTURES}`; }; const getApiPathAssets = function () { - const basePath = platformService.getService().coreStart.http.basePath.get(); + const basePath = platformService.getService().getBasePath(); return `${basePath}${API_ROUTE_WORKPAD_ASSETS}`; }; diff --git a/x-pack/plugins/canvas/public/services/context.tsx b/x-pack/plugins/canvas/public/services/context.tsx new file mode 100644 index 0000000000000..9bd86ef98f1e3 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/context.tsx @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React, { + useContext, + createElement, + createContext, + ComponentType, + FC, + ReactElement, +} from 'react'; +import { CanvasServices, CanvasServiceProviders } from '.'; + +export interface WithServicesProps { + services: CanvasServices; +} + +const defaultContextValue = { + embeddables: {}, + expressions: {}, + notify: {}, + platform: {}, + navLink: {}, +}; + +const context = createContext(defaultContextValue as CanvasServices); + +export const useServices = () => useContext(context); +export const usePlatformService = () => useServices().platform; +export const useEmbeddablesService = () => useServices().embeddables; +export const useExpressionsService = () => useServices().expressions; +export const useNotifyService = () => useServices().notify; +export const useNavLinkService = () => useServices().navLink; + +export const withServices = (type: ComponentType) => { + const EnhancedType: FC = (props) => { + const services = useServices(); + return createElement(type, { ...props, services }); + }; + return EnhancedType; +}; + +export const ServicesProvider: FC<{ + providers: CanvasServiceProviders; + children: ReactElement; +}> = ({ providers, children }) => { + const value = { + embeddables: providers.embeddables.getService(), + expressions: providers.expressions.getService(), + notify: providers.notify.getService(), + platform: providers.platform.getService(), + navLink: providers.navLink.getService(), + }; + return {children}; +}; diff --git a/x-pack/plugins/canvas/public/services/embeddables.ts b/x-pack/plugins/canvas/public/services/embeddables.ts new file mode 100644 index 0000000000000..13e308effcdba --- /dev/null +++ b/x-pack/plugins/canvas/public/services/embeddables.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { EmbeddableFactory } from '../../../../../src/plugins/embeddable/public'; +import { CanvasServiceFactory } from '.'; + +export interface EmbeddablesService { + getEmbeddableFactories: () => IterableIterator; +} + +export const embeddablesServiceFactory: CanvasServiceFactory = async ( + _coreSetup, + _coreStart, + _setupPlugins, + startPlugins +) => ({ + getEmbeddableFactories: startPlugins.embeddable.getEmbeddableFactories, +}); diff --git a/x-pack/plugins/canvas/public/services/expressions.ts b/x-pack/plugins/canvas/public/services/expressions.ts index 16f939a9c97fc..1376aab0ca8b9 100644 --- a/x-pack/plugins/canvas/public/services/expressions.ts +++ b/x-pack/plugins/canvas/public/services/expressions.ts @@ -14,6 +14,5 @@ export const expressionsServiceFactory: CanvasServiceFactory startPlugins ) => { await setupPlugins.expressions.__LEGACY.loadLegacyServerFunctionWrappers(); - return setupPlugins.expressions.fork(); }; diff --git a/x-pack/plugins/canvas/public/services/index.ts b/x-pack/plugins/canvas/public/services/index.ts index a929b4639d3e4..700d874d4507d 100644 --- a/x-pack/plugins/canvas/public/services/index.ts +++ b/x-pack/plugins/canvas/public/services/index.ts @@ -10,8 +10,16 @@ import { CanvasSetupDeps, CanvasStartDeps } from '../plugin'; import { notifyServiceFactory } from './notify'; import { platformServiceFactory } from './platform'; import { navLinkServiceFactory } from './nav_link'; +import { embeddablesServiceFactory } from './embeddables'; import { expressionsServiceFactory } from './expressions'; +export { NotifyService } from './notify'; +export { PlatformService } from './platform'; +export { NavLinkService } from './nav_link'; +export { EmbeddablesService } from './embeddables'; +export { ExpressionsService } from '../../../../../src/plugins/expressions/common'; +export * from './context'; + export type CanvasServiceFactory = ( coreSetup: CoreSetup, coreStart: CoreStart, @@ -28,6 +36,10 @@ class CanvasServiceProvider { this.factory = factory; } + setService(service: Service) { + this.service = service; + } + async start( coreSetup: CoreSetup, coreStart: CoreStart, @@ -60,13 +72,17 @@ class CanvasServiceProvider { export type ServiceFromProvider
<P>
= P extends CanvasServiceProvider ? T : never; export const services = { + embeddables: new CanvasServiceProvider(embeddablesServiceFactory), expressions: new CanvasServiceProvider(expressionsServiceFactory), notify: new CanvasServiceProvider(notifyServiceFactory), platform: new CanvasServiceProvider(platformServiceFactory), navLink: new CanvasServiceProvider(navLinkServiceFactory), }; +export type CanvasServiceProviders = typeof services; + export interface CanvasServices { + embeddables: ServiceFromProvider; expressions: ServiceFromProvider; notify: ServiceFromProvider; platform: ServiceFromProvider; @@ -88,10 +104,11 @@ export const startServices = async ( }; export const stopServices = () => { - Object.entries(services).forEach(([key, provider]) => provider.stop()); + Object.values(services).forEach((provider) => provider.stop()); }; export const { + embeddables: embeddableService, notify: notifyService, platform: platformService, navLink: navLinkService, diff --git a/x-pack/plugins/canvas/public/services/nav_link.ts b/x-pack/plugins/canvas/public/services/nav_link.ts index 68d685242351b..532b5264ee9ed 100644 --- a/x-pack/plugins/canvas/public/services/nav_link.ts +++ b/x-pack/plugins/canvas/public/services/nav_link.ts @@ -8,15 +8,15 @@ import { CanvasServiceFactory } from '.'; import { SESSIONSTORAGE_LASTPATH } from '../../common/lib/constants'; import { getSessionStorage } from '../lib/storage'; -interface NavLinkService { +export interface NavLinkService { updatePath: (path: string) => void; } export const navLinkServiceFactory: CanvasServiceFactory = ( coreSetup, - coreStart, - setupPlugins, - startPlugins, + _coreStart, + _setupPlugins, + _startPlugins, appUpdater ) => { return { diff --git a/x-pack/plugins/canvas/public/services/notify.ts b/x-pack/plugins/canvas/public/services/notify.ts index 5454a0f87c3f0..819525c8fa922 100644 --- a/x-pack/plugins/canvas/public/services/notify.ts +++ b/x-pack/plugins/canvas/public/services/notify.ts @@ -26,7 +26,7 @@ const getToast = (err: Error | string, opts: ToastInputFields = {}) => { }; }; -interface NotifyService { +export interface NotifyService { error: (err: string | Error, opts?: ToastInputFields) => void; warning: (err: string | Error, opts?: ToastInputFields) => void; info: (err: string | Error, opts?: ToastInputFields) => void; diff --git a/x-pack/plugins/canvas/public/services/platform.ts b/x-pack/plugins/canvas/public/services/platform.ts index 440e9523044c1..92c378e9aa597 100644 --- a/x-pack/plugins/canvas/public/services/platform.ts +++ b/x-pack/plugins/canvas/public/services/platform.ts @@ -4,21 +4,52 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { + SavedObjectsStart, + SavedObjectsClientContract, + IUiSettingsClient, + ChromeBreadcrumb, + IBasePath, +} from '../../../../../src/core/public'; import { CanvasServiceFactory } from '.'; -import { CoreStart, CoreSetup, CanvasSetupDeps, CanvasStartDeps } from '../plugin'; -interface PlatformService { - coreSetup: CoreSetup; - coreStart: CoreStart; - setupPlugins: CanvasSetupDeps; - startPlugins: CanvasStartDeps; +export interface PlatformService { + getBasePath: () => string; + getBasePathInterface: () => IBasePath; + getDocLinkVersion: () => string; + getElasticWebsiteUrl: () => string; + getHasWriteAccess: () => boolean; + getUISetting: (key: string, defaultValue?: any) => any; + setBreadcrumbs: (newBreadcrumbs: ChromeBreadcrumb[]) => void; + setRecentlyAccessed: (link: string, label: string, id: string) => void; + + // TODO: these should go away. We want thin accessors, not entire objects. + // Entire objects are hard to mock, and hide our dependency on the external service. + getSavedObjects: () => SavedObjectsStart; + getSavedObjectsClient: () => SavedObjectsClientContract; + getUISettings: () => IUiSettingsClient; } export const platformServiceFactory: CanvasServiceFactory = ( - coreSetup, - coreStart, - setupPlugins, - startPlugins + _coreSetup, + coreStart ) => { - return { coreSetup, coreStart, setupPlugins, startPlugins }; + return { + getBasePath: coreStart.http.basePath.get, + getBasePathInterface: () => coreStart.http.basePath, + getElasticWebsiteUrl: () => coreStart.docLinks.ELASTIC_WEBSITE_URL, + getDocLinkVersion: () => coreStart.docLinks.DOC_LINK_VERSION, + // TODO: is there a better type for this? The capabilities type allows for a Record, + // though we don't do this. So this cast may be the best option. + getHasWriteAccess: () => coreStart.application.capabilities.canvas.save as boolean, + getUISetting: coreStart.uiSettings.get.bind(coreStart.uiSettings), + setBreadcrumbs: coreStart.chrome.setBreadcrumbs, + setRecentlyAccessed: coreStart.chrome.recentlyAccessed.add, + + // TODO: these should go away. We want thin accessors, not entire objects. + // Entire objects are hard to mock, and hide our dependency on the external service. + getSavedObjects: () => coreStart.savedObjects, + getSavedObjectsClient: () => coreStart.savedObjects.client, + getUISettings: () => coreStart.uiSettings, + }; }; diff --git a/x-pack/plugins/canvas/public/services/stubs/embeddables.ts b/x-pack/plugins/canvas/public/services/stubs/embeddables.ts new file mode 100644 index 0000000000000..48100da462dd5 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/embeddables.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import { EmbeddablesService } from '../embeddables'; + +const noop = (..._args: any[]): any => {}; + +export const embeddablesService: EmbeddablesService = { + getEmbeddableFactories: noop, +}; diff --git a/x-pack/plugins/canvas/public/services/stubs/expressions.ts b/x-pack/plugins/canvas/public/services/stubs/expressions.ts new file mode 100644 index 0000000000000..26a90670106d0 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/expressions.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { ExpressionsService } from '../'; +import { + plugin, + ExpressionRenderDefinition, +} from '../../../../../../src/plugins/expressions/public'; +import { functions as functionDefinitions } from '../../../canvas_plugin_src/functions/common'; +// @ts-expect-error untyped local +import { renderFunctions } from '../../../canvas_plugin_src/renderers/core'; + +const placeholder = {} as any; +const expressionsPlugin = plugin(placeholder); +const setup = expressionsPlugin.setup(placeholder, { + inspector: {}, +} as any); + +export const expressionsService: ExpressionsService = setup.fork(); + +functionDefinitions.forEach((fn) => expressionsService.registerFunction(fn)); +renderFunctions.forEach((fn: ExpressionRenderDefinition) => + expressionsService.registerRenderer(fn) +); diff --git a/x-pack/plugins/canvas/public/services/stubs/index.ts b/x-pack/plugins/canvas/public/services/stubs/index.ts new file mode 100644 index 0000000000000..b4e440f204cc7 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/index.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { CanvasServices, services } from '../'; +import { embeddablesService } from './embeddables'; +import { expressionsService } from './expressions'; +import { navLinkService } from './nav_link'; +import { notifyService } from './notify'; +import { platformService } from './platform'; + +export const stubs: CanvasServices = { + embeddables: embeddablesService, + expressions: expressionsService, + navLink: navLinkService, + notify: notifyService, + platform: platformService, +}; + +export const startServices = async (providedServices: Partial = {}) => { + Object.entries(services).forEach(([key, provider]) => { + // @ts-expect-error Object.entries isn't strongly typed + const stub = providedServices[key] || stubs[key]; + provider.setService(stub); + }); +}; diff --git a/x-pack/plugins/canvas/public/services/stubs/nav_link.ts b/x-pack/plugins/canvas/public/services/stubs/nav_link.ts new file mode 100644 index 0000000000000..3b40eeb3e84f2 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/nav_link.ts @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { NavLinkService } from '../nav_link'; + +const noop = (..._args: any[]): any => {}; + +export const navLinkService: NavLinkService = { + updatePath: noop, +}; diff --git a/x-pack/plugins/canvas/public/services/stubs/notify.ts b/x-pack/plugins/canvas/public/services/stubs/notify.ts new file mode 100644 index 0000000000000..38eac2a5813eb --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/notify.ts @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { NotifyService } from '../notify'; + +const noop = (..._args: any[]): any => {}; + +export const notifyService: NotifyService = { + error: noop, + info: noop, + success: noop, + warning: noop, +}; diff --git a/x-pack/plugins/canvas/public/services/stubs/platform.ts b/x-pack/plugins/canvas/public/services/stubs/platform.ts new file mode 100644 index 0000000000000..9ada579573502 --- /dev/null +++ b/x-pack/plugins/canvas/public/services/stubs/platform.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { PlatformService } from '../platform'; + +const noop = (..._args: any[]): any => {}; + +export const platformService: PlatformService = { + getBasePath: () => '/base/path', + getBasePathInterface: noop, + getDocLinkVersion: () => 'dockLinkVersion', + getElasticWebsiteUrl: () => 'https://elastic.co', + getHasWriteAccess: () => true, + getUISetting: noop, + setBreadcrumbs: noop, + setRecentlyAccessed: noop, + getSavedObjects: noop, + getSavedObjectsClient: noop, + getUISettings: noop, +}; diff --git a/x-pack/plugins/canvas/public/state/initial_state.js b/x-pack/plugins/canvas/public/state/initial_state.js index 13021893e72e8..f9b02d33d6112 100644 --- a/x-pack/plugins/canvas/public/state/initial_state.js +++ b/x-pack/plugins/canvas/public/state/initial_state.js @@ -9,11 +9,13 @@ import { platformService } from '../services'; import { getDefaultWorkpad } from './defaults'; export const getInitialState = (path) => { + const { getHasWriteAccess } = platformService.getService(); + const state = { app: {}, // Kibana stuff in here assets: {}, // assets end up here transient: { - canUserWrite: platformService.getService().coreStart.application.capabilities.canvas.save, + canUserWrite: getHasWriteAccess(), zoomScale: 1, elementStats: { total: 0, diff --git a/x-pack/plugins/canvas/public/state/reducers/workpad.js b/x-pack/plugins/canvas/public/state/reducers/workpad.js index 9a0c30bdf1337..fffcb69c451ed 100644 --- a/x-pack/plugins/canvas/public/state/reducers/workpad.js +++ b/x-pack/plugins/canvas/public/state/reducers/workpad.js @@ -25,11 +25,7 @@ export const workpadReducer = handleActions( [setWorkpad]: (workpadState, { payload }) => { platformService .getService() - .coreStart.chrome.recentlyAccessed.add( - `${APP_ROUTE_WORKPAD}/${payload.id}`, - payload.name, - payload.id - ); + .setRecentlyAccessed(`${APP_ROUTE_WORKPAD}/${payload.id}`, payload.name, payload.id); return payload; }, diff --git a/x-pack/plugins/canvas/public/state/selectors/workpad.ts b/x-pack/plugins/canvas/public/state/selectors/workpad.ts index 1d7ea05daaa61..a677bcaf29e61 100644 --- a/x-pack/plugins/canvas/public/state/selectors/workpad.ts +++ b/x-pack/plugins/canvas/public/state/selectors/workpad.ts @@ -497,7 +497,7 @@ export function getRenderedWorkpad(state: State) { const workpad = getWorkpad(state); // eslint-disable-next-line no-unused-vars - const { pages, ...rest } = workpad; + const { pages, variables, ...rest } = workpad; return { pages: renderedPages, diff --git a/x-pack/plugins/canvas/shareable_runtime/README.md b/x-pack/plugins/canvas/shareable_runtime/README.md index 8fdeb6ca6258e..3839e7c4ecb3f 100644 --- a/x-pack/plugins/canvas/shareable_runtime/README.md +++ b/x-pack/plugins/canvas/shareable_runtime/README.md @@ -207,7 +207,7 @@ There are a number of options for the build 
script: ### Prerequisite -Before testing or running this PR locally, you **must** run `node scripts/runtime` from `/canvas` _after_ `yarn kbn bootstrap` and _before_ starting Kibana. It is only built automatically when Kibana is built to avoid slowing down other development activities. +Before testing or running this PR locally, you **must** run `node scripts/shareable_runtime` from `/canvas` _after_ `yarn kbn bootstrap` and _before_ starting Kibana. It is only built automatically when Kibana is built to avoid slowing down other development activities. ### Webpack Dev Server diff --git a/x-pack/plugins/canvas/shareable_runtime/webpack.config.js b/x-pack/plugins/canvas/shareable_runtime/webpack.config.js index 1a5a21985ba72..93dc3dbccd549 100644 --- a/x-pack/plugins/canvas/shareable_runtime/webpack.config.js +++ b/x-pack/plugins/canvas/shareable_runtime/webpack.config.js @@ -55,7 +55,6 @@ module.exports = { options: { presets: [require.resolve('@kbn/babel-preset/webpack_preset')], }, - sideEffects: false, }, { test: /\.tsx?$/, @@ -92,6 +91,7 @@ module.exports = { }, }, ], + sideEffects: true, }, { test: /\.module\.s(a|c)ss$/, diff --git a/x-pack/plugins/canvas/storybook/config.js b/x-pack/plugins/canvas/storybook/config.js index f349f9b7ccf98..dc16d6c46084d 100644 --- a/x-pack/plugins/canvas/storybook/config.js +++ b/x-pack/plugins/canvas/storybook/config.js @@ -8,6 +8,7 @@ import { configure, addDecorator, addParameters } from '@storybook/react'; import { withInfo } from '@storybook/addon-info'; import { create } from '@storybook/theming'; +import { startServices } from '../public/services/stubs'; import { addDecorators } from './decorators'; // If we're running Storyshots, be sure to register the require context hook. @@ -32,6 +33,7 @@ if (process.env.NODE_ENV === 'test') { } addDecorators(); +startServices(); function loadStories() { require('./dll_contexts'); diff --git a/x-pack/plugins/discover_enhanced/kibana.json b/x-pack/plugins/discover_enhanced/kibana.json index fbd04fe009687..531a84cd4c0e0 100644 --- a/x-pack/plugins/discover_enhanced/kibana.json +++ b/x-pack/plugins/discover_enhanced/kibana.json @@ -5,7 +5,7 @@ "server": true, "ui": true, "requiredPlugins": ["uiActions", "embeddable", "discover"], - "optionalPlugins": ["share"], + "optionalPlugins": ["share", "kibanaLegacy"], "configPath": ["xpack", "discoverEnhanced"], "requiredBundles": ["kibanaUtils", "data"] } diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts index 59359fb35f544..434d38c76d428 100644 --- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts +++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/abstract_explore_data_action.ts @@ -9,6 +9,7 @@ import { DiscoverStart } from '../../../../../../src/plugins/discover/public'; import { EmbeddableStart } from '../../../../../../src/plugins/embeddable/public'; import { ViewMode, IEmbeddable } from '../../../../../../src/plugins/embeddable/public'; import { StartServicesGetter } from '../../../../../../src/plugins/kibana_utils/public'; +import { KibanaLegacyStart } from '../../../../../../src/plugins/kibana_legacy/public'; import { CoreStart } from '../../../../../../src/core/public'; import { KibanaURL } from './kibana_url'; import * as shared from './shared'; @@ -18,6 +19,11 @@ export const ACTION_EXPLORE_DATA = 'ACTION_EXPLORE_DATA'; export interface PluginDeps { 
discover: Pick; embeddable: Pick; + kibanaLegacy?: { + dashboardConfig: { + getHideWriteControls: KibanaLegacyStart['dashboardConfig']['getHideWriteControls']; + }; + }; } export interface CoreDeps { @@ -42,7 +48,17 @@ export abstract class AbstractExploreDataAction { if (!embeddable) return false; - if (!this.params.start().plugins.discover.urlGenerator) return false; + + const { core, plugins } = this.params.start(); + const { capabilities } = core.application; + + if (capabilities.discover && !capabilities.discover.show) return false; + if (!plugins.discover.urlGenerator) return false; + const isDashboardOnlyMode = !!this.params + .start() + .plugins.kibanaLegacy?.dashboardConfig.getHideWriteControls(); + if (isDashboardOnlyMode) return false; + if (!shared.hasExactlyOneIndexPattern(embeddable)) return false; if (embeddable.getInput().viewMode !== ViewMode.VIEW) return false; return true; diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts index 0d22f0a36d418..14cd48ae1f509 100644 --- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts +++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_chart_action.test.ts @@ -34,7 +34,10 @@ afterEach(() => { i18nTranslateSpy.mockClear(); }); -const setup = ({ useRangeEvent = false }: { useRangeEvent?: boolean } = {}) => { +const setup = ({ + useRangeEvent = false, + dashboardOnlyMode = false, +}: { useRangeEvent?: boolean; dashboardOnlyMode?: boolean } = {}) => { type UrlGenerator = UrlGeneratorContract<'DISCOVER_APP_URL_GENERATOR'>; const core = coreMock.createStart(); @@ -54,6 +57,11 @@ const setup = ({ useRangeEvent = false }: { useRangeEvent?: boolean } = {}) => { embeddable: { filtersAndTimeRangeFromContext, }, + kibanaLegacy: { + dashboardConfig: { + getHideWriteControls: () => dashboardOnlyMode, + }, + }, }; const params: Params = { @@ -181,6 +189,26 @@ describe('"Explore underlying data" panel action', () => { expect(isCompatible).toBe(false); }); + + test('return false for dashboard_only mode', async () => { + const { action, context } = setup({ dashboardOnlyMode: true }); + const isCompatible = await action.isCompatible(context); + + expect(isCompatible).toBe(false); + }); + + test('returns false if Discover app is disabled', async () => { + const { action, context, core } = setup(); + + core.application.capabilities = { ...core.application.capabilities }; + (core.application.capabilities as any).discover = { + show: false, + }; + + const isCompatible = await action.isCompatible(context); + + expect(isCompatible).toBe(false); + }); }); describe('getHref()', () => { diff --git a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts index c362e554e96c0..68253655af890 100644 --- a/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts +++ b/x-pack/plugins/discover_enhanced/public/actions/explore_data/explore_data_context_menu_action.test.ts @@ -28,7 +28,7 @@ afterEach(() => { i18nTranslateSpy.mockClear(); }); -const setup = () => { +const setup = ({ dashboardOnlyMode = false }: { dashboardOnlyMode?: boolean } = {}) => { type UrlGenerator = UrlGeneratorContract<'DISCOVER_APP_URL_GENERATOR'>; const core = coreMock.createStart(); 
@@ -48,6 +48,11 @@ const setup = () => { embeddable: { filtersAndTimeRangeFromContext, }, + kibanaLegacy: { + dashboardConfig: { + getHideWriteControls: () => dashboardOnlyMode, + }, + }, }; const params: Params = { @@ -167,6 +172,26 @@ describe('"Explore underlying data" panel action', () => { expect(isCompatible).toBe(false); }); + + test('return false for dashboard_only mode', async () => { + const { action, context } = setup({ dashboardOnlyMode: true }); + const isCompatible = await action.isCompatible(context); + + expect(isCompatible).toBe(false); + }); + + test('returns false if Discover app is disabled', async () => { + const { action, context, core } = setup(); + + core.application.capabilities = { ...core.application.capabilities }; + (core.application.capabilities as any).discover = { + show: false, + }; + + const isCompatible = await action.isCompatible(context); + + expect(isCompatible).toBe(false); + }); }); describe('getHref()', () => { diff --git a/x-pack/plugins/discover_enhanced/public/plugin.ts b/x-pack/plugins/discover_enhanced/public/plugin.ts index 9613a9a8e3c8c..4b018354aa092 100644 --- a/x-pack/plugins/discover_enhanced/public/plugin.ts +++ b/x-pack/plugins/discover_enhanced/public/plugin.ts @@ -15,6 +15,7 @@ import { import { createStartServicesGetter } from '../../../../src/plugins/kibana_utils/public'; import { DiscoverSetup, DiscoverStart } from '../../../../src/plugins/discover/public'; import { SharePluginSetup, SharePluginStart } from '../../../../src/plugins/share/public'; +import { KibanaLegacySetup, KibanaLegacyStart } from '../../../../src/plugins/kibana_legacy/public'; import { EmbeddableSetup, EmbeddableStart, @@ -39,6 +40,7 @@ declare module '../../../../src/plugins/ui_actions/public' { export interface DiscoverEnhancedSetupDependencies { discover: DiscoverSetup; embeddable: EmbeddableSetup; + kibanaLegacy?: KibanaLegacySetup; share?: SharePluginSetup; uiActions: UiActionsSetup; } @@ -46,6 +48,7 @@ export interface DiscoverEnhancedSetupDependencies { export interface DiscoverEnhancedStartDependencies { discover: DiscoverStart; embeddable: EmbeddableStart; + kibanaLegacy?: KibanaLegacyStart; share?: SharePluginStart; uiActions: UiActionsStart; } diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts index 907c749f8ec0b..12cf7ccac6c59 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/http_requests.ts @@ -92,6 +92,17 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => { ]); }; + const setSimulateTemplateResponse = (response?: HttpResponse, error?: any) => { + const status = error ? error.status || 400 : 200; + const body = error ? 
JSON.stringify(error.body) : JSON.stringify(response); + + server.respondWith('POST', `${API_BASE_PATH}/index_templates/simulate`, [ + status, + { 'Content-Type': 'application/json' }, + body, + ]); + }; + return { setLoadTemplatesResponse, setLoadIndicesResponse, @@ -102,6 +113,7 @@ const registerHttpRequestMockHelpers = (server: SinonFakeServer) => { setLoadTemplateResponse, setCreateTemplateResponse, setUpdateTemplateResponse, + setSimulateTemplateResponse, }; }; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx index ad445f75f047c..e40cdc026210d 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx +++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/setup_environment.tsx @@ -14,6 +14,8 @@ import { notificationServiceMock, docLinksServiceMock, } from '../../../../../../src/core/public/mocks'; +import { GlobalFlyout } from '../../../../../../src/plugins/es_ui_shared/public'; + import { AppContextProvider } from '../../../public/application/app_context'; import { httpService } from '../../../public/application/services/http'; import { breadcrumbService } from '../../../public/application/services/breadcrumbs'; @@ -23,9 +25,11 @@ import { ExtensionsService } from '../../../public/services'; import { UiMetricService } from '../../../public/application/services/ui_metric'; import { setUiMetricService } from '../../../public/application/services/api'; import { setExtensionsService } from '../../../public/application/store/selectors'; +import { MappingsEditorProvider } from '../../../public/application/components'; import { init as initHttpRequests } from './http_requests'; const mockHttpClient = axios.create({ adapter: axiosXhrAdapter }); +const { GlobalFlyoutProvider } = GlobalFlyout; export const services = { extensionsService: new ExtensionsService(), @@ -62,7 +66,11 @@ export const WithAppDependencies = (Comp: any, overridingDependencies: any = {}) const mergedDependencies = merge({}, appDependencies, overridingDependencies); return ( - + + + + + ); }; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts b/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts index 9889ebe16ba1e..ecedf819e6185 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/helpers/test_subjects.ts @@ -28,6 +28,7 @@ export type TestSubjects = | 'legacyTemplateTable' | 'manageTemplateButton' | 'mappingsTabContent' + | 'previewTabContent' | 'noAliasesCallout' | 'noMappingsCallout' | 'noSettingsCallout' @@ -48,4 +49,5 @@ export type TestSubjects = | 'templateList' | 'templatesTab' | 'templateTable' - | 'viewButton'; + | 'viewButton' + | 'simulateTemplatePreview'; diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts index a397419053351..23b40f4cbd3d7 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.helpers.ts @@ -40,10 +40,15 @@ const createActions = (testBed: TestBed) => { /** * User Actions */ - const selectDetailsTab = 
(tab: 'summary' | 'settings' | 'mappings' | 'aliases') => { - const tabs = ['summary', 'settings', 'mappings', 'aliases']; + const selectDetailsTab = async ( + tab: 'summary' | 'settings' | 'mappings' | 'aliases' | 'preview' + ) => { + const tabs = ['summary', 'settings', 'mappings', 'aliases', 'preview']; - testBed.find('templateDetails.tab').at(tabs.indexOf(tab)).simulate('click'); + await act(async () => { + testBed.find('templateDetails.tab').at(tabs.indexOf(tab)).simulate('click'); + }); + testBed.component.update(); }; const clickReloadButton = () => { diff --git a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts index f7ebc0bcf632b..06f57896d4900 100644 --- a/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts +++ b/x-pack/plugins/index_management/__jest__/client_integration/home/index_templates_tab.test.ts @@ -493,7 +493,7 @@ describe('Index Templates tab', () => { }); describe('tabs', () => { - test('should have 4 tabs', async () => { + test('should have 5 tabs', async () => { const template = fixtures.getTemplate({ name: `a${getRandomString()}`, indexPatterns: ['template1Pattern1*', 'template1Pattern2'], @@ -524,35 +524,48 @@ describe('Index Templates tab', () => { const { find, actions, exists } = testBed; httpRequestsMockHelpers.setLoadTemplateResponse(template); + httpRequestsMockHelpers.setSimulateTemplateResponse({ simulateTemplate: 'response' }); await actions.clickTemplateAt(0); - expect(find('templateDetails.tab').length).toBe(4); + expect(find('templateDetails.tab').length).toBe(5); expect(find('templateDetails.tab').map((t) => t.text())).toEqual([ 'Summary', 'Settings', 'Mappings', 'Aliases', + 'Preview', ]); // Summary tab should be initial active tab expect(exists('summaryTab')).toBe(true); // Navigate and verify all tabs - actions.selectDetailsTab('settings'); + await actions.selectDetailsTab('settings'); expect(exists('summaryTab')).toBe(false); expect(exists('settingsTabContent')).toBe(true); - actions.selectDetailsTab('aliases'); + await actions.selectDetailsTab('aliases'); expect(exists('summaryTab')).toBe(false); expect(exists('settingsTabContent')).toBe(false); expect(exists('aliasesTabContent')).toBe(true); - actions.selectDetailsTab('mappings'); + await actions.selectDetailsTab('mappings'); expect(exists('summaryTab')).toBe(false); expect(exists('settingsTabContent')).toBe(false); expect(exists('aliasesTabContent')).toBe(false); expect(exists('mappingsTabContent')).toBe(true); + + await actions.selectDetailsTab('preview'); + expect(exists('summaryTab')).toBe(false); + expect(exists('settingsTabContent')).toBe(false); + expect(exists('aliasesTabContent')).toBe(false); + expect(exists('mappingsTabContent')).toBe(false); + expect(exists('previewTabContent')).toBe(true); + + expect(find('simulateTemplatePreview').text().replace(/\s/g, '')).toEqual( + JSON.stringify({ simulateTemplate: 'response' }) + ); }); test('should show an info callout if data is not present', async () => { @@ -568,17 +581,17 @@ describe('Index Templates tab', () => { await actions.clickTemplateAt(0); - expect(find('templateDetails.tab').length).toBe(4); + expect(find('templateDetails.tab').length).toBe(5); expect(exists('summaryTab')).toBe(true); // Navigate and verify callout message per tab - actions.selectDetailsTab('settings'); + await actions.selectDetailsTab('settings'); 
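The `setSimulateTemplateResponse` helper registered in `http_requests.ts` above backs these Preview-tab assertions. It accepts either a success payload or an error object, following the same `error.status` / `error.body` convention as the other fake-server helpers; a minimal usage sketch (the error payload shown is illustrative, not taken from the source):

```ts
// Success: the sinon fake server answers POST .../index_templates/simulate
// with a 200 and this body.
httpRequestsMockHelpers.setSimulateTemplateResponse({ simulateTemplate: 'response' });

// Failure: the fake server answers with error.status (400 when omitted)
// and JSON-serializes error.body as the response payload.
httpRequestsMockHelpers.setSimulateTemplateResponse(undefined, {
  status: 400,
  body: { message: 'Unable to simulate index template' }, // illustrative message
});
```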
expect(exists('noSettingsCallout')).toBe(true); - actions.selectDetailsTab('mappings'); + await actions.selectDetailsTab('mappings'); expect(exists('noMappingsCallout')).toBe(true); - actions.selectDetailsTab('aliases'); + await actions.selectDetailsTab('aliases'); expect(exists('noAliasesCallout')).toBe(true); }); }); diff --git a/x-pack/plugins/index_management/common/constants/index.ts b/x-pack/plugins/index_management/common/constants/index.ts index d1700f0e611c0..11240271503e2 100644 --- a/x-pack/plugins/index_management/common/constants/index.ts +++ b/x-pack/plugins/index_management/common/constants/index.ts @@ -47,7 +47,9 @@ export { UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB, UIM_TEMPLATE_DETAIL_PANEL_MAPPINGS_TAB, UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB, + UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB, UIM_TEMPLATE_CREATE, UIM_TEMPLATE_UPDATE, UIM_TEMPLATE_CLONE, + UIM_TEMPLATE_SIMULATE, } from './ui_metric'; diff --git a/x-pack/plugins/index_management/common/constants/ui_metric.ts b/x-pack/plugins/index_management/common/constants/ui_metric.ts index 5fda812c704d1..545555b92f352 100644 --- a/x-pack/plugins/index_management/common/constants/ui_metric.ts +++ b/x-pack/plugins/index_management/common/constants/ui_metric.ts @@ -41,6 +41,8 @@ export const UIM_TEMPLATE_DETAIL_PANEL_SUMMARY_TAB = 'template_details_summary_t export const UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB = 'template_details_settings_tab'; export const UIM_TEMPLATE_DETAIL_PANEL_MAPPINGS_TAB = 'template_details_mappings_tab'; export const UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB = 'template_details_aliases_tab'; +export const UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB = 'template_details_preview_tab'; export const UIM_TEMPLATE_CREATE = 'template_create'; export const UIM_TEMPLATE_UPDATE = 'template_update'; export const UIM_TEMPLATE_CLONE = 'template_clone'; +export const UIM_TEMPLATE_SIMULATE = 'template_simulate'; diff --git a/x-pack/plugins/index_management/common/lib/template_serialization.ts b/x-pack/plugins/index_management/common/lib/template_serialization.ts index 069d6ac29fbca..1803d89a40016 100644 --- a/x-pack/plugins/index_management/common/lib/template_serialization.ts +++ b/x-pack/plugins/index_management/common/lib/template_serialization.ts @@ -109,7 +109,7 @@ export function serializeLegacyTemplate(template: TemplateDeserialized): LegacyT version, order, indexPatterns, - template: { settings, aliases, mappings }, + template: { settings, aliases, mappings } = {}, } = template; return { diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts index 3d496d68cc66e..a112d73230b82 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts +++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts @@ -61,11 +61,10 @@ describe('', () => { const { exists, find, actions, component } = testBed; // Verify flyout exists with correct title - expect(exists('componentTemplateDetails')).toBe(true); - expect(find('componentTemplateDetails.title').text()).toBe(COMPONENT_TEMPLATE.name); + expect(find('title').text()).toBe(COMPONENT_TEMPLATE.name); // Verify footer does not display since "actions" prop was not 
provided - expect(exists('componentTemplateDetails.footer')).toBe(false); + expect(exists('footer')).toBe(false); // Verify tabs exist expect(exists('settingsTab')).toBe(true); @@ -185,7 +184,7 @@ describe('', () => { const { exists, actions, component, find } = testBed; // Verify footer exists - expect(exists('componentTemplateDetails.footer')).toBe(true); + expect(exists('footer')).toBe(true); expect(exists('manageComponentTemplateButton')).toBe(true); // Click manage button and verify actions diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts index 25c2d654fd900..fe81e8dcfe123 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts +++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/component_template_details.helpers.ts @@ -6,7 +6,7 @@ import { registerTestBed, TestBed } from '../../../../../../../../../test_utils'; import { WithAppDependencies } from './setup_environment'; -import { ComponentTemplateDetailsFlyout } from '../../../component_template_details'; +import { ComponentTemplateDetailsFlyoutContent } from '../../../component_template_details'; export type ComponentTemplateDetailsTestBed = TestBed & { actions: ReturnType; @@ -44,7 +44,7 @@ const createActions = (testBed: TestBed) = export const setup = (props: any): ComponentTemplateDetailsTestBed => { const setupTestBed = registerTestBed( - WithAppDependencies(ComponentTemplateDetailsFlyout), + WithAppDependencies(ComponentTemplateDetailsFlyoutContent), { memoryRouter: { wrapComponent: false, @@ -65,6 +65,8 @@ export type ComponentTemplateDetailsTestSubjects = | 'componentTemplateDetails' | 'componentTemplateDetails.title' | 'componentTemplateDetails.footer' + | 'title' + | 'footer' | 'summaryTab' | 'mappingsTab' | 'settingsTab' diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx index 7e460d3855cb0..2f7317e3e656b 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx +++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx @@ -15,12 +15,15 @@ import { applicationServiceMock, } from '../../../../../../../../../../src/core/public/mocks'; +import { GlobalFlyout } from '../../../../../../../../../../src/plugins/es_ui_shared/public'; +import { MappingsEditorProvider } from '../../../../mappings_editor'; import { ComponentTemplatesProvider } from '../../../component_templates_context'; import { init as initHttpRequests } from './http_requests'; import { API_BASE_PATH } from './constants'; const mockHttpClient = axios.create({ adapter: axiosXhrAdapter }); +const { GlobalFlyoutProvider } = GlobalFlyout; const appDependencies = { httpClient: (mockHttpClient as unknown) as HttpSetup, @@ -42,7 +45,11 @@ export const setupEnvironment = () => { }; export const 
WithAppDependencies = (Comp: any) => (props: any) => ( - - - + + + + + + + ); diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx index 60f1fff3cc9de..0f5bc64c358b9 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx @@ -8,7 +8,6 @@ import React, { useState } from 'react'; import { FormattedMessage } from '@kbn/i18n/react'; import { - EuiFlyout, EuiFlyoutHeader, EuiTitle, EuiFlyoutBody, @@ -28,14 +27,19 @@ import { ComponentTemplateTabs, TabType } from './tabs'; import { ManageButton, ManageAction } from './manage_button'; import { attemptToDecodeURI } from '../lib'; -interface Props { +export interface Props { componentTemplateName: string; onClose: () => void; actions?: ManageAction[]; showSummaryCallToAction?: boolean; } -export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({ +export const defaultFlyoutProps = { + 'data-test-subj': 'componentTemplateDetails', + 'aria-labelledby': 'componentTemplateDetailsFlyoutTitle', +}; + +export const ComponentTemplateDetailsFlyoutContent: React.FunctionComponent = ({ componentTemplateName, onClose, actions, @@ -109,13 +113,7 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({ } return ( - + <> @@ -172,6 +170,6 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({ )} - + ); }; diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts index 11aac200a2f14..8687a1f5b89c0 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/index.ts @@ -4,4 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -export { ComponentTemplateDetailsFlyout } from './component_template_details'; +export { + ComponentTemplateDetailsFlyoutContent, + defaultFlyoutProps, + Props as ComponentTemplateDetailsProps, +} from './component_template_details'; diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx index efc8b649ef872..8ba7409a9ac57 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx @@ -4,18 +4,22 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useCallback } from 'react'; import { RouteComponentProps } from 'react-router-dom'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; import { ScopedHistory } from 'kibana/public'; import { EuiLink, EuiText, EuiSpacer } from '@elastic/eui'; -import { SectionLoading, ComponentTemplateDeserialized } from '../shared_imports'; +import { SectionLoading, ComponentTemplateDeserialized, GlobalFlyout } from '../shared_imports'; import { UIM_COMPONENT_TEMPLATE_LIST_LOAD } from '../constants'; import { attemptToDecodeURI } from '../lib'; import { useComponentTemplatesContext } from '../component_templates_context'; -import { ComponentTemplateDetailsFlyout } from '../component_template_details'; +import { + ComponentTemplateDetailsFlyoutContent, + defaultFlyoutProps, + ComponentTemplateDetailsProps, +} from '../component_template_details'; import { EmptyPrompt } from './empty_prompt'; import { ComponentTable } from './table'; import { LoadError } from './error'; @@ -26,39 +30,112 @@ interface Props { history: RouteComponentProps['history']; } +const { useGlobalFlyout } = GlobalFlyout; + export const ComponentTemplateList: React.FunctionComponent = ({ componentTemplateName, history, }) => { + const { + addContent: addContentToGlobalFlyout, + removeContent: removeContentFromGlobalFlyout, + } = useGlobalFlyout(); const { api, trackMetric, documentation } = useComponentTemplatesContext(); const { data, isLoading, error, sendRequest } = api.useLoadComponentTemplates(); const [componentTemplatesToDelete, setComponentTemplatesToDelete] = useState([]); - const goToComponentTemplateList = () => { + const goToComponentTemplateList = useCallback(() => { return history.push({ pathname: 'component_templates', }); - }; - - const goToEditComponentTemplate = (name: string) => { - return history.push({ - pathname: encodeURI(`edit_component_template/${encodeURIComponent(name)}`), - }); - }; + }, [history]); + + const goToEditComponentTemplate = useCallback( + (name: string) => { + return history.push({ + pathname: encodeURI(`edit_component_template/${encodeURIComponent(name)}`), + }); + }, + [history] + ); - const goToCloneComponentTemplate = (name: string) => { - return history.push({ - pathname: encodeURI(`create_component_template/${encodeURIComponent(name)}`), - }); - }; + const goToCloneComponentTemplate = useCallback( + (name: string) => { + return history.push({ + pathname: encodeURI(`create_component_template/${encodeURIComponent(name)}`), + }); + }, + [history] + ); // Track component loaded useEffect(() => { trackMetric('loaded', UIM_COMPONENT_TEMPLATE_LIST_LOAD); }, [trackMetric]); + useEffect(() => { + if (componentTemplateName) { + const actions = [ + { + name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.editButtonLabel', { + defaultMessage: 'Edit', + }), + icon: 'pencil', + handleActionClick: () => + goToEditComponentTemplate(attemptToDecodeURI(componentTemplateName)), + }, + { + name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.cloneActionLabel', { + defaultMessage: 'Clone', + }), + icon: 'copy', + handleActionClick: () => + goToCloneComponentTemplate(attemptToDecodeURI(componentTemplateName)), + }, + { + name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.deleteButtonLabel', { + defaultMessage: 'Delete', + }), + icon: 'trash', + getIsDisabled: (details: ComponentTemplateDeserialized) => + details._kbnMeta.usedBy.length > 0, + 
closePopoverOnClick: true, + handleActionClick: () => { + setComponentTemplatesToDelete([attemptToDecodeURI(componentTemplateName)]); + }, + }, + ]; + + // Open the flyout with the Component Template Details content + addContentToGlobalFlyout({ + id: 'componentTemplateDetails', + Component: ComponentTemplateDetailsFlyoutContent, + props: { + onClose: goToComponentTemplateList, + componentTemplateName, + showSummaryCallToAction: true, + actions, + }, + flyoutProps: { ...defaultFlyoutProps, onClose: goToComponentTemplateList }, + }); + } + }, [ + componentTemplateName, + goToComponentTemplateList, + goToEditComponentTemplate, + goToCloneComponentTemplate, + addContentToGlobalFlyout, + history, + ]); + + useEffect(() => { + if (!componentTemplateName) { + removeContentFromGlobalFlyout('componentTemplateDetails'); + } + }, [componentTemplateName, removeContentFromGlobalFlyout]); + let content: React.ReactNode; if (isLoading) { @@ -126,45 +203,6 @@ export const ComponentTemplateList: React.FunctionComponent = ({ componentTemplatesToDelete={componentTemplatesToDelete} /> ) : null} - - {/* details flyout */} - {componentTemplateName && ( - - goToEditComponentTemplate(attemptToDecodeURI(componentTemplateName)), - }, - { - name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.cloneActionLabel', { - defaultMessage: 'Clone', - }), - icon: 'copy', - handleActionClick: () => - goToCloneComponentTemplate(attemptToDecodeURI(componentTemplateName)), - }, - { - name: i18n.translate('xpack.idxMgmt.componentTemplateDetails.deleteButtonLabel', { - defaultMessage: 'Delete', - }), - icon: 'trash', - getIsDisabled: (details: ComponentTemplateDeserialized) => - details._kbnMeta.usedBy.length > 0, - closePopoverOnClick: true, - handleActionClick: () => { - setComponentTemplatesToDelete([attemptToDecodeURI(componentTemplateName)]); - }, - }, - ]} - /> - )}

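The inline flyout removed here is replaced by the `GlobalFlyout` registration added earlier in this file. Condensed into one place, the register/unregister cycle the list now relies on looks roughly like this (a sketch assembled from the hunks in this diff, not a verbatim excerpt; `actions` is the edit/clone/delete array built above):

```ts
const { useGlobalFlyout } = GlobalFlyout; // from es_ui_shared, re-exported via shared_imports

// Inside ComponentTemplateList
const {
  addContent: addContentToGlobalFlyout,
  removeContent: removeContentFromGlobalFlyout,
} = useGlobalFlyout();

useEffect(() => {
  if (componentTemplateName) {
    // Register the details panel with the app-wide flyout provider.
    addContentToGlobalFlyout({
      id: 'componentTemplateDetails',
      Component: ComponentTemplateDetailsFlyoutContent,
      props: {
        componentTemplateName,
        onClose: goToComponentTemplateList,
        showSummaryCallToAction: true,
        actions,
      },
      flyoutProps: { ...defaultFlyoutProps, onClose: goToComponentTemplateList },
    });
  }
}, [componentTemplateName, addContentToGlobalFlyout, goToComponentTemplateList]);

useEffect(() => {
  // Deregister when no template is selected so the flyout closes.
  if (!componentTemplateName) {
    removeContentFromGlobalFlyout('componentTemplateDetails');
  }
}, [componentTemplateName, removeContentFromGlobalFlyout]);
```

The selector in `component_templates_selector.tsx` follows the same pattern and additionally passes a `cleanUpFunc`, while the jest setup files wrap the components under test in `GlobalFlyoutProvider` (and `MappingsEditorProvider`) so the registered content can actually render.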
); }; diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx index 8795c08fd2bee..ed570579d4e45 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.tsx @@ -11,8 +11,12 @@ import { FormattedMessage } from '@kbn/i18n/react'; import { i18n } from '@kbn/i18n'; import { ComponentTemplateListItem } from '../../../../../common'; -import { SectionError, SectionLoading } from '../shared_imports'; -import { ComponentTemplateDetailsFlyout } from '../component_template_details'; +import { SectionError, SectionLoading, GlobalFlyout } from '../shared_imports'; +import { + ComponentTemplateDetailsFlyoutContent, + defaultFlyoutProps, + ComponentTemplateDetailsProps, +} from '../component_template_details'; import { CreateButtonPopOver } from './components'; import { ComponentTemplates } from './component_templates'; import { ComponentTemplatesSelection } from './component_templates_selection'; @@ -20,10 +24,12 @@ import { useApi } from '../component_templates_context'; import './component_templates_selector.scss'; +const { useGlobalFlyout } = GlobalFlyout; + interface Props { onChange: (components: string[]) => void; onComponentsLoaded: (components: ComponentTemplateListItem[]) => void; - defaultValue: string[]; + defaultValue?: string[]; docUri: string; emptyPrompt?: { text?: string | JSX.Element; @@ -53,6 +59,10 @@ export const ComponentTemplatesSelector = ({ emptyPrompt: { text, showCreateButton } = {}, }: Props) => { const { data: components, isLoading, error } = useApi().useLoadComponentTemplates(); + const { + addContent: addContentToGlobalFlyout, + removeContent: removeContentFromGlobalFlyout, + } = useGlobalFlyout(); const [selectedComponent, setSelectedComponent] = useState(null); const [componentsSelected, setComponentsSelected] = useState([]); const isInitialized = useRef(false); @@ -60,15 +70,20 @@ export const ComponentTemplatesSelector = ({ const hasSelection = Object.keys(componentsSelected).length > 0; const hasComponents = components && components.length > 0 ? true : false; + const closeComponentTemplateDetails = () => { + setSelectedComponent(null); + }; + useEffect(() => { if (components) { if ( + defaultValue && defaultValue.length > 0 && componentsSelected.length === 0 && isInitialized.current === false ) { - // Once the components are loaded we check the ones selected - // from the defaultValue provided + // Once the components are fetched, we check the ones previously selected + // from the prop "defaultValue" passed. 
const nextComponentsSelected = defaultValue .map((name) => components.find((comp) => comp.name === name)) .filter(Boolean) as ComponentTemplateListItem[]; @@ -88,6 +103,30 @@ export const ComponentTemplatesSelector = ({ } }, [isLoading, error, components, onComponentsLoaded]); + useEffect(() => { + if (selectedComponent) { + // Open the flyout with the Component Template Details content + addContentToGlobalFlyout({ + id: 'componentTemplateDetails', + Component: ComponentTemplateDetailsFlyoutContent, + props: { + onClose: closeComponentTemplateDetails, + componentTemplateName: selectedComponent, + }, + flyoutProps: { ...defaultFlyoutProps, onClose: closeComponentTemplateDetails }, + cleanUpFunc: () => { + setSelectedComponent(null); + }, + }); + } + }, [selectedComponent, addContentToGlobalFlyout]); + + useEffect(() => { + if (!selectedComponent) { + removeContentFromGlobalFlyout('componentTemplateDetails'); + } + }, [selectedComponent, removeContentFromGlobalFlyout]); + const onSelectionReorder = (reorderedComponents: ComponentTemplateListItem[]) => { setComponentsSelected(reorderedComponents); }; @@ -198,30 +237,12 @@ export const ComponentTemplatesSelector = ({ ); - const renderComponentDetails = () => { - if (!selectedComponent) { - return null; - } - - return ( - setSelectedComponent(null)} - componentTemplateName={selectedComponent} - /> - ); - }; - if (isLoading) { return renderLoading(); } else if (error) { return renderError(); } else if (hasComponents) { - return ( - <> - {renderSelector()} - {renderComponentDetails()} - - ); + return renderSelector(); } // No components: render empty prompt @@ -244,6 +265,7 @@ export const ComponentTemplatesSelector = ({

); + return ( { + const [templatePreview, setTemplatePreview] = useState('{}'); + + const updatePreview = useCallback(async () => { + if (!template || Object.keys(template).length === 0) { + return; + } + + const indexTemplate = serializeTemplate(stripEmptyFields(template) as TemplateDeserialized); + + // Until ES fixes a bug on their side we will send a random index pattern to the simulate API. + // Issue: https://github.com/elastic/elasticsearch/issues/59152 + indexTemplate.index_patterns = [uuid.v4()]; + + const { data, error } = await simulateIndexTemplate(indexTemplate); + + if (data) { + // "Overlapping" info is only useful when simulating against an index + // which we don't do here. + delete data.overlapping; + } + + setTemplatePreview(JSON.stringify(data ?? error, null, 2)); + }, [template]); + + useEffect(() => { + updatePreview(); + }, [updatePreview]); + + return templatePreview === '{}' ? null : ( + + {templatePreview} + + ); +}); diff --git a/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx b/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx new file mode 100644 index 0000000000000..63bfe78546041 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/index_templates/simulate_template/simulate_template_flyout.tsx @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import React, { useState, useCallback, useEffect, useRef } from 'react'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { + EuiFlyoutHeader, + EuiTitle, + EuiFlyoutBody, + EuiFlyoutFooter, + EuiFlexGroup, + EuiFlexItem, + EuiButton, + EuiButtonEmpty, + EuiTextColor, + EuiText, + EuiSpacer, +} from '@elastic/eui'; + +import { SimulateTemplate } from './simulate_template'; + +export interface Props { + onClose(): void; + getTemplate: () => { [key: string]: any }; +} + +export const defaultFlyoutProps = { + 'data-test-subj': 'simulateTemplateFlyout', + 'aria-labelledby': 'simulateTemplateFlyoutTitle', +}; + +export const SimulateTemplateFlyoutContent = ({ onClose, getTemplate }: Props) => { + const isMounted = useRef(false); + const [heightCodeBlock, setHeightCodeBlock] = useState(0); + const [template, setTemplate] = useState<{ [key: string]: any }>({}); + + useEffect(() => { + setHeightCodeBlock( + document.getElementsByClassName('euiFlyoutBody__overflow')[0].clientHeight - 96 + ); + }, []); + + const updatePreview = useCallback(async () => { + const indexTemplate = await getTemplate(); + setTemplate(indexTemplate); + }, [getTemplate]); + + useEffect(() => { + if (isMounted.current === false) { + updatePreview(); + } + isMounted.current = true; + }, [updatePreview]); + + return ( + <> + + +
[The flyout's JSX — header, body rendering the template preview, and footer buttons — did not survive extraction at this point in the diff; a hedged reconstruction follows.]
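Based on the imports and handlers declared in this new file (`EuiFlyoutHeader`, `EuiFlyoutBody`, `EuiFlyoutFooter`, `SimulateTemplate`, `onClose`, `updatePreview`, `heightCodeBlock`), the lost markup is roughly as follows. The heading copy, button labels, and the exact props passed to `SimulateTemplate` are assumptions, not recovered text:

```tsx
<>
  <EuiFlyoutHeader>
    <EuiTitle size="m">
      {/* Heading copy is assumed; the original string was not recoverable */}
      <h2 id="simulateTemplateFlyoutTitle">Preview index template</h2>
    </EuiTitle>
  </EuiFlyoutHeader>

  <EuiFlyoutBody>
    {/* Renders the composed template preview; the height prop name is an assumption */}
    <SimulateTemplate template={template} minHeightCodeBlock={`${heightCodeBlock}px`} />
  </EuiFlyoutBody>

  <EuiFlyoutFooter>
    <EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
      <EuiFlexItem grow={false}>
        <EuiButtonEmpty onClick={onClose} flush="left">
          Close
        </EuiButtonEmpty>
      </EuiFlexItem>
      <EuiFlexItem grow={false}>
        <EuiButton onClick={updatePreview} fill>
          Update
        </EuiButton>
      </EuiFlexItem>
    </EuiFlexGroup>
  </EuiFlyoutFooter>
</>
```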
+ + + + + + + + + + + + + + + + + + + + + + ); +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx index 311cb37d0b47a..64347d19e9b47 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/shape_datatype.test.tsx @@ -36,8 +36,6 @@ describe('Mappings editor: shape datatype', () => { test('initial view and default parameters values', async () => { const defaultMappings = { - _meta: {}, - _source: {}, properties: { myField: { type: 'shape', diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx index ed60414d198f1..c03aa4805d27f 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/datatypes/text_datatype.test.tsx @@ -47,8 +47,6 @@ describe.skip('Mappings editor: text datatype', () => { test('initial view and default parameters values', async () => { const defaultMappings = { - _meta: {}, - _source: {}, properties: { myField: { type: 'text', diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx index 4f9d8a960a1a2..c146c7704911f 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/edit_field.test.tsx @@ -65,8 +65,6 @@ describe('Mappings editor: edit field', () => { test('should update form parameters when changing the field datatype', async () => { const defaultMappings = { - _meta: {}, - _source: {}, properties: { userName: { ...defaultTextParameters, diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx index 638bbfd925ffb..a6558b28a1273 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/__jest__/client_integration/helpers/mappings_editor.helpers.tsx @@ -7,9 +7,11 @@ import React from 'react'; import { act } from 'react-dom/test-utils'; import { ReactWrapper } from 'enzyme'; +import { GlobalFlyout } from '../../../../../../../../../../src/plugins/es_ui_shared/public'; import { registerTestBed, TestBed } from '../../../../../../../../../test_utils'; import { 
getChildFieldsName } from '../../../lib'; import { MappingsEditor } from '../../../mappings_editor'; +import { MappingsEditorProvider } from '../../../mappings_editor_context'; jest.mock('@elastic/eui', () => { const original = jest.requireActual('@elastic/eui'); @@ -51,6 +53,8 @@ jest.mock('@elastic/eui', () => { }; }); +const { GlobalFlyoutProvider } = GlobalFlyout; + export interface DomFields { [key: string]: { type: string; @@ -247,7 +251,15 @@ const createActions = (testBed: TestBed) => { }; export const setup = (props: any = { onUpdate() {} }): MappingsEditorTestBed => { - const setupTestBed = registerTestBed(MappingsEditor, { + const ComponentToTest = (propsOverride: { [key: string]: any }) => ( + + + + + + ); + + const setupTestBed = registerTestBed(ComponentToTest, { memoryRouter: { wrapComponent: false, }, diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx index 86bcc796a88eb..20b2e11855029 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form.tsx @@ -7,16 +7,14 @@ import React, { useEffect, useRef } from 'react'; import { EuiSpacer } from '@elastic/eui'; import { useForm, Form, SerializerFunc } from '../../shared_imports'; -import { GenericObject } from '../../types'; -import { Types, useDispatch } from '../../mappings_state'; +import { GenericObject, MappingsConfiguration } from '../../types'; +import { useDispatch } from '../../mappings_state_context'; import { DynamicMappingSection } from './dynamic_mapping_section'; import { SourceFieldSection } from './source_field_section'; import { MetaFieldSection } from './meta_field_section'; import { RoutingSection } from './routing_section'; import { configurationFormSchema } from './configuration_form_schema'; -type MappingsConfiguration = Types['MappingsConfiguration']; - interface Props { value?: MappingsConfiguration; } diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx index 6e80f8b813ec2..8742dfc916924 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/configuration_form/configuration_form_schema.tsx @@ -11,8 +11,7 @@ import { EuiLink, EuiCode } from '@elastic/eui'; import { documentationService } from '../../../../services/documentation'; import { FormSchema, FIELD_TYPES, VALIDATION_TYPES, fieldValidators } from '../../shared_imports'; -import { MappingsConfiguration } from '../../reducer'; -import { ComboBoxOption } from '../../types'; +import { ComboBoxOption, MappingsConfiguration } from '../../types'; const { containsCharsField, isJsonField } = fieldValidators; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx 
b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx index 400de4052afa4..4b19b6f7ae5c3 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/document_fields.tsx @@ -6,7 +6,7 @@ import React, { useMemo, useCallback } from 'react'; import { EuiSpacer } from '@elastic/eui'; -import { useMappingsState, useDispatch } from '../../mappings_state'; +import { useMappingsState, useDispatch } from '../../mappings_state_context'; import { deNormalize } from '../../lib'; import { EditFieldContainer } from './fields'; import { DocumentFieldsHeader } from './document_fields_header'; @@ -18,7 +18,7 @@ export const DocumentFields = React.memo(() => { const { fields, search, documentFields } = useMappingsState(); const dispatch = useDispatch(); - const { status, fieldToEdit, editor: editorType } = documentFields; + const { editor: editorType } = documentFields; const jsonEditorDefaultValue = useMemo(() => { if (editorType === 'json') { @@ -33,14 +33,6 @@ export const DocumentFields = React.memo(() => { ); - const renderEditField = () => { - if (status !== 'editingField') { - return null; - } - const field = fields.byId[fieldToEdit!]; - return ; - }; - const onSearchChange = useCallback( (value: string) => { dispatch({ type: 'search:update', value }); @@ -59,7 +51,7 @@ export const DocumentFields = React.memo(() => { ) : ( editor )} - {renderEditField()} + ); }); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx index 51f9ca63be403..ad283a3fe47bd 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/editor_toggle_controls.tsx @@ -7,7 +7,7 @@ import React from 'react'; import { EuiButton, EuiText } from '@elastic/eui'; -import { useDispatch, useMappingsState } from '../../mappings_state'; +import { useDispatch, useMappingsState } from '../../mappings_state_context'; import { FieldsEditor } from '../../types'; import { canUseMappingsEditor, normalize } from '../../lib'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx index 01cca7e249a23..0320f2ff51da3 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/name_parameter.tsx @@ -9,7 +9,7 @@ import React from 'react'; import { TextField, UseField, FieldConfig } from '../../../shared_imports'; import { validateUniqueName } from '../../../lib'; import { PARAMETERS_DEFINITION } from '../../../constants'; -import { useMappingsState } from '../../../mappings_state'; +import { useMappingsState } from 
'../../../mappings_state_context'; export const NameParameter = () => { const { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx index 46e70bf8e56ba..31ae37c82a43e 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/type_parameter.tsx @@ -70,7 +70,13 @@ export const TypeParameter = ({ isMultiField, isRootLevelField, showDocLink = fa : filterTypesForNonRootFields(FIELD_TYPES_OPTIONS) } selectedOptions={typeField.value} - onChange={typeField.setValue} + onChange={(value) => { + if (value.length === 0) { + // Don't allow clearing the type. One must always be selected + return; + } + typeField.setValue(value); + }} isClearable={false} data-test-subj="fieldType" /> diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx index 57a765c38dd26..dc631b7dbf32d 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx @@ -18,7 +18,7 @@ import { import { useForm, Form, FormDataProvider } from '../../../../shared_imports'; import { EUI_SIZE } from '../../../../constants'; -import { useDispatch } from '../../../../mappings_state'; +import { useDispatch } from '../../../../mappings_state_context'; import { fieldSerializer } from '../../../../lib'; import { Field, NormalizedFields } from '../../../../types'; import { NameParameter, TypeParameter, SubTypeParameter } from '../../field_parameters'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx index 80e3e9bec605a..2a98b5948e5a9 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/delete_field_provider.tsx @@ -7,7 +7,7 @@ import React, { useState } from 'react'; import { i18n } from '@kbn/i18n'; -import { useMappingsState, useDispatch } from '../../../mappings_state'; +import { useMappingsState, useDispatch } from '../../../mappings_state_context'; import { NormalizedField } from '../../../types'; import { getAllDescendantAliases } from '../../../lib'; import { ModalConfirmationDeleteFields } from './modal_confirmation_delete_fields'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx 
b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx index e8e41955a5e80..e6950ccfe253e 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field.tsx @@ -6,7 +6,6 @@ import React from 'react'; import { i18n } from '@kbn/i18n'; import { - EuiFlyout, EuiFlyoutHeader, EuiFlyoutBody, EuiFlyoutFooter, @@ -25,7 +24,7 @@ import { TYPE_DEFINITION } from '../../../../constants'; import { Field, NormalizedField, NormalizedFields, MainType, SubType } from '../../../../types'; import { CodeBlock } from '../../../code_block'; import { getParametersFormForType } from '../field_types'; -import { UpdateFieldProvider, UpdateFieldFunc } from './update_field_provider'; +import { UpdateFieldFunc } from './use_update_field'; import { EditFieldHeaderForm } from './edit_field_header_form'; const limitStringLength = (text: string, limit = 18): string => { @@ -36,19 +35,28 @@ const limitStringLength = (text: string, limit = 18): string => { return `...${text.substr(limit * -1)}`; }; -interface Props { +export interface Props { form: FormHook; field: NormalizedField; allFields: NormalizedFields['byId']; exitEdit(): void; + updateField: UpdateFieldFunc; } -export const EditField = React.memo(({ form, field, allFields, exitEdit }: Props) => { - const getSubmitForm = (updateField: UpdateFieldFunc) => async (e?: React.FormEvent) => { - if (e) { - e.preventDefault(); - } +export const defaultFlyoutProps = { + 'data-test-subj': 'mappingsEditorFieldEdit', + 'aria-labelledby': 'mappingsEditorFieldEditTitle', + className: 'mappingsEditor__editField', + maxWidth: 720, +}; + +// The default FormWrapper is the , which wrapps the form with +// a
. We can't have a div as first child of the Flyout as it breaks +// the height calculaction and does not render the footer position correctly. +const FormWrapper: React.FC = ({ children }) => <>{children}; +export const EditField = React.memo(({ form, field, allFields, exitEdit, updateField }: Props) => { + const submitForm = async () => { const { isValid, data } = await form.submit(); if (isValid) { @@ -56,174 +64,152 @@ export const EditField = React.memo(({ form, field, allFields, exitEdit }: Props } }; - const cancel = () => { - exitEdit(); - }; - const { isMultiField } = field; return ( - - {(updateField) => ( -
- - - - - {/* We need an extra div to get out of flex grow */} -
- {/* Title */} - -

- {isMultiField - ? i18n.translate('xpack.idxMgmt.mappingsEditor.editMultiFieldTitle', { - defaultMessage: "Edit multi-field '{fieldName}'", - values: { - fieldName: limitStringLength(field.source.name), - }, - }) - : i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldTitle', { - defaultMessage: "Edit field '{fieldName}'", - values: { - fieldName: limitStringLength(field.source.name), - }, - })} -

-
-
-
- - {/* Documentation link */} - - {({ type, subType }) => { - const linkDocumentation = - documentationService.getTypeDocLink(subType) || - documentationService.getTypeDocLink(type); - - if (!linkDocumentation) { - return null; - } - - const typeDefinition = TYPE_DEFINITION[type as MainType]; - const subTypeDefinition = TYPE_DEFINITION[subType as SubType]; - - return ( - - - {i18n.translate( - 'xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', - { - defaultMessage: '{type} documentation', - values: { - type: subTypeDefinition - ? subTypeDefinition.label - : typeDefinition.label, - }, - } - )} - - - ); - }} - -
- - {/* Field path */} - - - {field.path.join(' > ')} - - -
- - - - - - {({ type, subType }) => { - const ParametersForm = getParametersFormForType(type, subType); - - if (!ParametersForm) { - return null; - } - - return ( - - ); - }} - - - - - {form.isSubmitted && !form.isValid && ( - <> - - - - )} - - + + + + + {/* We need an extra div to get out of flex grow */} +
+ {/* Title */} + +

+ {isMultiField + ? i18n.translate('xpack.idxMgmt.mappingsEditor.editMultiFieldTitle', { + defaultMessage: "Edit multi-field '{fieldName}'", + values: { + fieldName: limitStringLength(field.source.name), + }, + }) + : i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldTitle', { + defaultMessage: "Edit field '{fieldName}'", + values: { + fieldName: limitStringLength(field.source.name), + }, + })} +

+
+
+
+ + {/* Documentation link */} + + {({ type, subType }) => { + const linkDocumentation = + documentationService.getTypeDocLink(subType) || + documentationService.getTypeDocLink(type); + + if (!linkDocumentation) { + return null; + } + + const typeDefinition = TYPE_DEFINITION[type as MainType]; + const subTypeDefinition = TYPE_DEFINITION[subType as SubType]; + + return ( - - {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldCancelButtonLabel', { - defaultMessage: 'Cancel', - })} - - - - - {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldUpdateButtonLabel', { - defaultMessage: 'Update', + {i18n.translate('xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', { + defaultMessage: '{type} documentation', + values: { + type: subTypeDefinition ? subTypeDefinition.label : typeDefinition.label, + }, })} - + -
-
-
-
- )} -
+ ); + }} + + + + {/* Field path */} + + + {field.path.join(' > ')} + + + + + + + + + {({ type, subType }) => { + const ParametersForm = getParametersFormForType(type, subType); + + if (!ParametersForm) { + return null; + } + + return ( + + ); + }} + + + + + {form.isSubmitted && !form.isValid && ( + <> + + + + )} + + + + + {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldCancelButtonLabel', { + defaultMessage: 'Cancel', + })} + + + + + {i18n.translate('xpack.idxMgmt.mappingsEditor.editFieldUpdateButtonLabel', { + defaultMessage: 'Update', + })} + + + + + ); }); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx index 5105a2a157a6d..4996f59105c04 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_container.tsx @@ -3,24 +3,38 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import React, { useEffect, useCallback } from 'react'; +import React, { useEffect, useCallback, useMemo } from 'react'; -import { useForm } from '../../../../shared_imports'; -import { useDispatch } from '../../../../mappings_state'; -import { Field, NormalizedField, NormalizedFields } from '../../../../types'; +import { useForm, GlobalFlyout } from '../../../../shared_imports'; +import { useDispatch, useMappingsState } from '../../../../mappings_state_context'; +import { Field } from '../../../../types'; import { fieldSerializer, fieldDeserializer } from '../../../../lib'; -import { EditField } from './edit_field'; +import { ModalConfirmationDeleteFields } from '../modal_confirmation_delete_fields'; +import { EditField, defaultFlyoutProps, Props as EditFieldProps } from './edit_field'; +import { useUpdateField } from './use_update_field'; -interface Props { - field: NormalizedField; - allFields: NormalizedFields['byId']; -} +const { useGlobalFlyout } = GlobalFlyout; -export const EditFieldContainer = React.memo(({ field, allFields }: Props) => { +export const EditFieldContainer = React.memo(() => { + const { fields, documentFields } = useMappingsState(); const dispatch = useDispatch(); + const { + addContent: addContentToGlobalFlyout, + removeContent: removeContentFromGlobalFlyout, + } = useGlobalFlyout(); + const { updateField, modal } = useUpdateField(); + + const { status, fieldToEdit } = documentFields; + const isEditing = status === 'editingField'; + + const field = isEditing ? fields.byId[fieldToEdit!] 
: undefined; + + const formDefaultValue = useMemo(() => { + return { ...field?.source }; + }, [field?.source]); const { form } = useForm({ - defaultValue: { ...field.source }, + defaultValue: formDefaultValue, serializer: fieldSerializer, deserializer: fieldDeserializer, options: { stripEmptyFields: false }, @@ -40,5 +54,48 @@ export const EditFieldContainer = React.memo(({ field, allFields }: Props) => { dispatch({ type: 'documentField.changeStatus', value: 'idle' }); }, [dispatch]); - return ; + useEffect(() => { + if (isEditing) { + // Open the flyout with the content + addContentToGlobalFlyout({ + id: 'mappingsEditField', + Component: EditField, + props: { + form, + field: field!, + exitEdit, + allFields: fields.byId, + updateField, + }, + flyoutProps: { ...defaultFlyoutProps, onClose: exitEdit }, + cleanUpFunc: exitEdit, + }); + } + }, [ + isEditing, + field, + form, + addContentToGlobalFlyout, + fields.byId, + fieldToEdit, + exitEdit, + updateField, + ]); + + useEffect(() => { + if (!isEditing) { + removeContentFromGlobalFlyout('mappingsEditField'); + } + }, [isEditing, removeContentFromGlobalFlyout]); + + useEffect(() => { + return () => { + if (isEditing) { + // When the component unmounts, exit edit mode. + exitEdit(); + } + }; + }, [isEditing, exitEdit]); + + return modal.isOpen ? : null; }); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx deleted file mode 100644 index e31d12689e7e0..0000000000000 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/update_field_provider.tsx +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import React, { useState } from 'react'; -import { i18n } from '@kbn/i18n'; - -import { useMappingsState, useDispatch } from '../../../../mappings_state'; -import { shouldDeleteChildFieldsAfterTypeChange, getAllDescendantAliases } from '../../../../lib'; -import { NormalizedField, DataType } from '../../../../types'; -import { PARAMETERS_DEFINITION } from '../../../../constants'; -import { ModalConfirmationDeleteFields } from '../modal_confirmation_delete_fields'; - -export type UpdateFieldFunc = (field: NormalizedField) => void; - -interface Props { - children: (saveProperty: UpdateFieldFunc) => React.ReactNode; -} - -interface State { - isModalOpen: boolean; - field?: NormalizedField; - aliases?: string[]; -} - -export const UpdateFieldProvider = ({ children }: Props) => { - const [state, setState] = useState({ - isModalOpen: false, - }); - const dispatch = useDispatch(); - - const { fields } = useMappingsState(); - const { byId, aliases } = fields; - - const confirmButtonText = i18n.translate( - 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.confirmDescription', - { - defaultMessage: 'Confirm type change', - } - ); - - let modalTitle: string | undefined; - - if (state.field) { - const { source } = state.field; - - modalTitle = i18n.translate( - 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.title', - { - defaultMessage: "Confirm change '{fieldName}' type to '{fieldType}'.", - values: { - fieldName: source.name, - fieldType: source.type, - }, - } - ); - } - - const closeModal = () => { - setState({ isModalOpen: false }); - }; - - const updateField: UpdateFieldFunc = (field) => { - const previousField = byId[field.id]; - - const willDeleteChildFields = (oldType: DataType, newType: DataType): boolean => { - const { hasChildFields, hasMultiFields } = field; - - if (!hasChildFields && !hasMultiFields) { - // No child or multi-fields will be deleted, no confirmation needed. - return false; - } - - return shouldDeleteChildFieldsAfterTypeChange(oldType, newType); - }; - - if (field.source.type !== previousField.source.type) { - // Array of all the aliases pointing to the current field beeing updated - const aliasesOnField = aliases[field.id] || []; - - // Array of all the aliases pointing to the current field + all its possible children - const aliasesOnFieldAndDescendants = getAllDescendantAliases(field, fields); - - const isReferencedByAlias = aliasesOnField && Boolean(aliasesOnField.length); - const nextTypeCanHaveAlias = !PARAMETERS_DEFINITION.path.targetTypesNotAllowed.includes( - field.source.type - ); - - // We need to check if, by changing the type, we will also - // delete possible child properties ("fields" or "properties"). - // If we will, we need to warn the user about it. - let requiresConfirmation: boolean; - let aliasesToDelete: string[] = []; - - if (isReferencedByAlias && !nextTypeCanHaveAlias) { - aliasesToDelete = aliasesOnFieldAndDescendants; - requiresConfirmation = true; - } else { - requiresConfirmation = willDeleteChildFields(previousField.source.type, field.source.type); - if (requiresConfirmation) { - aliasesToDelete = aliasesOnFieldAndDescendants.filter( - // We will only delete aliases that points to possible children, *NOT* the field itself - (id) => aliasesOnField.includes(id) === false - ); - } - } - - if (requiresConfirmation) { - setState({ - isModalOpen: true, - field, - aliases: Boolean(aliasesToDelete.length) - ? 
aliasesToDelete.map((id) => byId[id].path.join(' > ')).sort() - : undefined, - }); - return; - } - } - - dispatch({ type: 'field.edit', value: field.source }); - }; - - const confirmTypeUpdate = () => { - dispatch({ type: 'field.edit', value: state.field!.source }); - closeModal(); - }; - - return ( - <> - {children(updateField)} - - {state.isModalOpen && ( - - )} - - ); -}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts new file mode 100644 index 0000000000000..ed659cd05b060 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/use_update_field.ts @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useState, useCallback } from 'react'; +import { i18n } from '@kbn/i18n'; + +import { useMappingsState, useDispatch } from '../../../../mappings_state_context'; +import { shouldDeleteChildFieldsAfterTypeChange, getAllDescendantAliases } from '../../../../lib'; +import { NormalizedField, DataType } from '../../../../types'; +import { PARAMETERS_DEFINITION } from '../../../../constants'; + +export type UpdateFieldFunc = (field: NormalizedField) => void; + +interface State { + isModalOpen: boolean; + field?: NormalizedField; + aliases?: string[]; +} + +export const useUpdateField = () => { + const [state, setState] = useState({ + isModalOpen: false, + }); + const dispatch = useDispatch(); + + const { fields } = useMappingsState(); + const { byId, aliases } = fields; + + const confirmButtonText = i18n.translate( + 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.confirmDescription', + { + defaultMessage: 'Confirm type change', + } + ); + + let modalTitle = ''; + + if (state.field) { + const { source } = state.field; + + modalTitle = i18n.translate( + 'xpack.idxMgmt.mappingsEditor.updateField.confirmationModal.title', + { + defaultMessage: "Confirm change '{fieldName}' type to '{fieldType}'.", + values: { + fieldName: source.name, + fieldType: source.type, + }, + } + ); + } + + const closeModal = () => { + setState({ isModalOpen: false }); + }; + + const updateField: UpdateFieldFunc = useCallback( + (field) => { + const previousField = byId[field.id]; + + const willDeleteChildFields = (oldType: DataType, newType: DataType): boolean => { + const { hasChildFields, hasMultiFields } = field; + + if (!hasChildFields && !hasMultiFields) { + // No child or multi-fields will be deleted, no confirmation needed. 
+ return false; + } + + return shouldDeleteChildFieldsAfterTypeChange(oldType, newType); + }; + + if (field.source.type !== previousField.source.type) { + // Array of all the aliases pointing to the current field being updated + const aliasesOnField = aliases[field.id] || []; + + // Array of all the aliases pointing to the current field + all its possible children + const aliasesOnFieldAndDescendants = getAllDescendantAliases(field, fields); + + const isReferencedByAlias = aliasesOnField && Boolean(aliasesOnField.length); + const nextTypeCanHaveAlias = !PARAMETERS_DEFINITION.path.targetTypesNotAllowed.includes( + field.source.type + ); + + // We need to check if, by changing the type, we will also + // delete possible child properties ("fields" or "properties"). + // If we will, we need to warn the user about it. + let requiresConfirmation: boolean; + let aliasesToDelete: string[] = []; + + if (isReferencedByAlias && !nextTypeCanHaveAlias) { + aliasesToDelete = aliasesOnFieldAndDescendants; + requiresConfirmation = true; + } else { + requiresConfirmation = willDeleteChildFields( + previousField.source.type, + field.source.type + ); + if (requiresConfirmation) { + aliasesToDelete = aliasesOnFieldAndDescendants.filter( + // We will only delete aliases that point to possible children, *NOT* the field itself + (id) => aliasesOnField.includes(id) === false + ); + } + } + + if (requiresConfirmation) { + setState({ + isModalOpen: true, + field, + aliases: Boolean(aliasesToDelete.length) + ? aliasesToDelete.map((id) => byId[id].path.join(' > ')).sort() + : undefined, + }); + return; + } + } + + dispatch({ type: 'field.edit', value: field.source }); + }, + [dispatch, aliases, fields, byId] + ); + + const confirmTypeUpdate = () => { + dispatch({ type: 'field.edit', value: state.field!.source }); + closeModal(); + }; + + return { + updateField, + modal: { + isOpen: state.isModalOpen, + props: { + childFields: state.field && state.field.childFields, + title: modalTitle, + aliases: state.aliases, + byId, + confirmButtonText, + onConfirm: confirmTypeUpdate, + onCancel: closeModal, + }, + }, + }; +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx index 55093e606cfa1..7d9ad3bc6aaec 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item_container.tsx @@ -5,7 +5,7 @@ */ import React, { useMemo, useCallback, useRef } from 'react'; -import { useMappingsState, useDispatch } from '../../../mappings_state'; +import { useMappingsState, useDispatch } from '../../../mappings_state_context'; import { NormalizedField } from '../../../types'; import { FieldsListItem } from './fields_list_item'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx index 5954f6f285f10..d750c0e604c5e 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx
+++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_json_editor.tsx @@ -6,7 +6,7 @@ import React, { useRef, useCallback } from 'react'; -import { useDispatch } from '../../mappings_state'; +import { useDispatch } from '../../mappings_state_context'; import { JsonEditor } from '../../shared_imports'; export interface Props { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx index 9d9df38ef4e25..7a0b72ae647d5 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields_tree_editor.tsx @@ -8,7 +8,7 @@ import React, { useMemo, useCallback } from 'react'; import { EuiButtonEmpty, EuiSpacer } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { useMappingsState, useDispatch } from '../../mappings_state'; +import { useMappingsState, useDispatch } from '../../mappings_state_context'; import { FieldsList, CreateField } from './fields'; export const DocumentFieldsTreeEditor = () => { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx index 9077781b7fb43..f3602a800eeeb 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result.tsx @@ -8,9 +8,8 @@ import VirtualList from 'react-tiny-virtual-list'; import { EuiEmptyPrompt, EuiButton } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; -import { SearchResult as SearchResultType } from '../../../types'; -import { useDispatch } from '../../../mappings_state'; -import { State } from '../../../reducer'; +import { SearchResult as SearchResultType, State } from '../../../types'; +import { useDispatch } from '../../../mappings_state_context'; import { SearchResultItem } from './search_result_item'; interface Props { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx index ab8b90b6be3b5..73d3e078f6ff3 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx @@ -10,7 +10,7 @@ import { i18n } from '@kbn/i18n'; import { SearchResult } from '../../../types'; import { TYPE_DEFINITION } from '../../../constants'; -import { useDispatch } from '../../../mappings_state'; +import { useDispatch } from '../../../mappings_state_context'; import { getTypeLabelFromType } from '../../../lib'; import { DeleteFieldProvider } 
from '../fields/delete_field_provider'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts index 34c410f06e520..dc7f20f4d026b 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/load_mappings/index.ts @@ -4,5 +4,5 @@ * you may not use this file except in compliance with the Elastic License. */ -export * from './load_from_json_button'; -export * from './load_mappings_provider'; +export { LoadMappingsFromJsonButton } from './load_from_json_button'; +export { LoadMappingsProvider } from './load_mappings_provider'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx index a95579a8a141e..44a809a7a01bf 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form.tsx @@ -9,12 +9,11 @@ import { FormattedMessage } from '@kbn/i18n/react'; import { EuiText, EuiLink, EuiSpacer } from '@elastic/eui'; import { useForm, Form, SerializerFunc, UseField, JsonEditorField } from '../../shared_imports'; -import { Types, useDispatch } from '../../mappings_state'; +import { MappingsTemplates } from '../../types'; +import { useDispatch } from '../../mappings_state_context'; import { templatesFormSchema } from './templates_form_schema'; import { documentationService } from '../../../../services/documentation'; -type MappingsTemplates = Types['MappingsTemplates']; - interface Props { value?: MappingsTemplates; } diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts index 667b5685723d2..daca85f95b0b9 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/templates_form/templates_form_schema.ts @@ -7,7 +7,7 @@ import { i18n } from '@kbn/i18n'; import { FormSchema, fieldValidators } from '../../shared_imports'; -import { MappingsTemplates } from '../../reducer'; +import { MappingsTemplates } from '../../types'; const { isJsonField } = fieldValidators; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts index 29cfaf99c6559..00bb41663dd9c 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/index.ts @@ -4,12 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export * from './mappings_editor'; +export { MappingsEditor } from './mappings_editor'; // We export both the button & the load mappings provider // to give flexibility to the consumer -export * from './components/load_mappings'; +export { LoadMappingsFromJsonButton, LoadMappingsProvider } from './components/load_mappings'; -export { OnUpdateHandler, Types } from './mappings_state'; +export { MappingsEditorProvider } from './mappings_editor_context'; -export { IndexSettings } from './types'; +export { IndexSettings, OnUpdateHandler } from './types'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx index 9e3637f970293..411193f10b24a 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/index_settings_context.tsx @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ import React, { createContext, useContext } from 'react'; + import { IndexSettings } from './types'; const IndexSettingsContext = createContext(undefined); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx index e8fda90737708..292882f1c5b4b 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor.tsx @@ -14,24 +14,40 @@ import { TemplatesForm, MultipleMappingsWarning, } from './components'; -import { IndexSettings } from './types'; +import { + OnUpdateHandler, + IndexSettings, + Field, + Mappings, + MappingsConfiguration, + MappingsTemplates, +} from './types'; import { extractMappingsDefinition } from './lib'; -import { State } from './reducer'; -import { MappingsState, Props as MappingsStateProps, Types } from './mappings_state'; +import { useMappingsState } from './mappings_state_context'; +import { useMappingsStateListener } from './use_state_listener'; import { IndexSettingsProvider } from './index_settings_context'; +type TabName = 'fields' | 'advanced' | 'templates'; + +interface MappingsEditorParsedMetadata { + parsedDefaultValue?: { + configuration: MappingsConfiguration; + fields: { [key: string]: Field }; + templates: MappingsTemplates; + }; + multipleMappingsDeclared: boolean; +} + interface Props { - onChange: MappingsStateProps['onChange']; + onChange: OnUpdateHandler; value?: { [key: string]: any }; indexSettings?: IndexSettings; } -type TabName = 'fields' | 'advanced' | 'templates'; - export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Props) => { - const [selectedTab, selectTab] = useState('fields'); - - const { parsedDefaultValue, multipleMappingsDeclared } = useMemo(() => { + const { parsedDefaultValue, multipleMappingsDeclared } = useMemo< + MappingsEditorParsedMetadata + >(() => { const mappingsDefinition = extractMappingsDefinition(value); if (mappingsDefinition === null) { @@ -69,18 +85,28 @@ export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Pr return { parsedDefaultValue: parsed, multipleMappingsDeclared: false }; }, [value]); + /** + * Hook that will listen to: + * 1. 
"value" prop changes in order to reset the mappings editor + * 2. "state" changes in order to communicate any updates to the consumer + */ + useMappingsStateListener({ onChange, value: parsedDefaultValue }); + + const state = useMappingsState(); + const [selectedTab, selectTab] = useState('fields'); + useEffect(() => { if (multipleMappingsDeclared) { // We set the data getter here as the user won't be able to make any changes onChange({ - getData: () => value! as Types['Mappings'], + getData: () => value! as Mappings, validate: () => Promise.resolve(true), isValid: true, }); } }, [multipleMappingsDeclared, onChange, value]); - const changeTab = async (tab: TabName, state: State) => { + const changeTab = async (tab: TabName) => { if (selectedTab === 'advanced') { // When we navigate away we need to submit the form to validate if there are any errors. const { isValid: isConfigurationFormValid } = await state.configuration.submitForm!(); @@ -102,59 +128,53 @@ export const MappingsEditor = React.memo(({ onChange, value, indexSettings }: Pr selectTab(tab); }; + const tabToContentMap = { + fields: , + templates: , + advanced: , + }; + return (
{multipleMappingsDeclared ? ( ) : ( - - {({ state }) => { - const tabToContentMap = { - fields: , - templates: , - advanced: , - }; - - return ( -
- - changeTab('fields', state)} - isSelected={selectedTab === 'fields'} - data-test-subj="formTab" - > - {i18n.translate('xpack.idxMgmt.mappingsEditor.fieldsTabLabel', { - defaultMessage: 'Mapped fields', - })} - - changeTab('templates', state)} - isSelected={selectedTab === 'templates'} - data-test-subj="formTab" - > - {i18n.translate('xpack.idxMgmt.mappingsEditor.templatesTabLabel', { - defaultMessage: 'Dynamic templates', - })} - - changeTab('advanced', state)} - isSelected={selectedTab === 'advanced'} - data-test-subj="formTab" - > - {i18n.translate('xpack.idxMgmt.mappingsEditor.advancedTabLabel', { - defaultMessage: 'Advanced options', - })} - - - - - - {tabToContentMap[selectedTab]} -
- ); - }} -
+
+ + changeTab('fields')} + isSelected={selectedTab === 'fields'} + data-test-subj="formTab" + > + {i18n.translate('xpack.idxMgmt.mappingsEditor.fieldsTabLabel', { + defaultMessage: 'Mapped fields', + })} + + changeTab('templates')} + isSelected={selectedTab === 'templates'} + data-test-subj="formTab" + > + {i18n.translate('xpack.idxMgmt.mappingsEditor.templatesTabLabel', { + defaultMessage: 'Dynamic templates', + })} + + changeTab('advanced')} + isSelected={selectedTab === 'advanced'} + data-test-subj="formTab" + > + {i18n.translate('xpack.idxMgmt.mappingsEditor.advancedTabLabel', { + defaultMessage: 'Advanced options', + })} + + + + + + {tabToContentMap[selectedTab]} +
)}
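
Taken together, the changes to mappings_editor.tsx above and the two new context files below replace the old render-prop wrapper with React context: the reducer state lives in a provider, components read it through useMappingsState/useDispatch, and useMappingsStateListener keeps the consumer's onChange callback in sync. A minimal consumption sketch, assuming a hypothetical consumer component and import path; only MappingsEditorProvider, MappingsEditor, OnUpdateHandler, and their props are taken from this diff:

import React from 'react';
import { MappingsEditor, MappingsEditorProvider, OnUpdateHandler } from './mappings_editor';

// Receives the editor's validity flag, a lazy getter for the mappings object, and a validate() helper.
const onMappingsUpdate: OnUpdateHandler = async ({ isValid, getData, validate }) => {
  const valid = isValid ?? (await validate());
  if (valid) {
    // getData() returns the mappings object currently held in the editor state (or undefined).
    window.console.log(getData());
  }
};

// Hypothetical consumer: the provider owns the reducer state, so any sibling UI
// (for example content rendered in the global flyout) can read the same state via the hooks.
export const MappingsSection = ({ value }: { value?: { [key: string]: any } }) => (
  <MappingsEditorProvider>
    <MappingsEditor value={value} onChange={onMappingsUpdate} />
  </MappingsEditorProvider>
);

This mirrors how renderApp in application/index.tsx, later in this diff, nests MappingsEditorProvider alongside GlobalFlyoutProvider so the editor state is available outside the editor's own tree.
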
diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx new file mode 100644 index 0000000000000..596b49cc89ee8 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_editor_context.tsx @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import React from 'react'; + +import { StateProvider } from './mappings_state_context'; + +export const MappingsEditorProvider: React.FC = ({ children }) => { + return {children}; +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx new file mode 100644 index 0000000000000..a402dec250056 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state_context.tsx @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useReducer, createContext, useContext } from 'react'; + +import { reducer } from './reducer'; +import { State, Dispatch } from './types'; + +const StateContext = createContext(undefined); +const DispatchContext = createContext(undefined); + +export const StateProvider: React.FC = ({ children }) => { + const initialState: State = { + isValid: true, + configuration: { + defaultValue: {}, + data: { + raw: {}, + format: () => ({}), + }, + validate: () => Promise.resolve(true), + }, + templates: { + defaultValue: {}, + data: { + raw: {}, + format: () => ({}), + }, + validate: () => Promise.resolve(true), + }, + fields: { + byId: {}, + rootLevelFields: [], + aliases: {}, + maxNestedDepth: 0, + }, + documentFields: { + status: 'idle', + editor: 'default', + }, + fieldsJsonEditor: { + format: () => ({}), + isValid: true, + }, + search: { + term: '', + result: [], + }, + }; + + const [state, dispatch] = useReducer(reducer, initialState); + + return ( + + {children} + + ); +}; + +export const useMappingsState = () => { + const ctx = useContext(StateContext); + if (ctx === undefined) { + throw new Error('useMappingsState must be used within a '); + } + return ctx; +}; + +export const useDispatch = () => { + const ctx = useContext(DispatchContext); + if (ctx === undefined) { + throw new Error('useDispatch must be used within a '); + } + return ctx; +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts index 27f8b12493008..18a8270117ea4 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/reducer.ts @@ -3,8 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { OnFormUpdateArg, FormHook } from './shared_imports'; -import { Field, NormalizedFields, NormalizedField, FieldsEditor, SearchResult } from './types'; +import { Field, NormalizedFields, NormalizedField, State, Action } from './types'; import { getFieldMeta, getUniqueId, @@ -17,99 +16,6 @@ import { } from './lib'; import { PARAMETERS_DEFINITION } from './constants'; -export interface MappingsConfiguration { - enabled?: boolean; - throwErrorsForUnmappedFields?: boolean; - date_detection: boolean; - numeric_detection: boolean; - dynamic_date_formats: string[]; - _source: { - enabled?: boolean; - includes?: string[]; - excludes?: string[]; - }; - _meta?: string; -} - -export interface MappingsTemplates { - dynamic_templates: DynamicTemplate[]; -} - -interface DynamicTemplate { - [key: string]: { - mapping: { - [key: string]: any; - }; - match_mapping_type?: string; - match?: string; - unmatch?: string; - match_pattern?: string; - path_match?: string; - path_unmatch?: string; - }; -} - -export interface MappingsFields { - [key: string]: any; -} - -type DocumentFieldsStatus = 'idle' | 'editingField' | 'creatingField'; - -interface DocumentFieldsState { - status: DocumentFieldsStatus; - editor: FieldsEditor; - fieldToEdit?: string; - fieldToAddFieldTo?: string; -} - -interface ConfigurationFormState extends OnFormUpdateArg { - defaultValue: MappingsConfiguration; - submitForm?: FormHook['submit']; -} - -interface TemplatesFormState extends OnFormUpdateArg { - defaultValue: MappingsTemplates; - submitForm?: FormHook['submit']; -} - -export interface State { - isValid: boolean | undefined; - configuration: ConfigurationFormState; - documentFields: DocumentFieldsState; - fields: NormalizedFields; - fieldForm?: OnFormUpdateArg; - fieldsJsonEditor: { - format(): MappingsFields; - isValid: boolean; - }; - search: { - term: string; - result: SearchResult[]; - }; - templates: TemplatesFormState; -} - -export type Action = - | { type: 'editor.replaceMappings'; value: { [key: string]: any } } - | { type: 'configuration.update'; value: Partial } - | { type: 'configuration.save'; value: MappingsConfiguration } - | { type: 'templates.update'; value: Partial } - | { type: 'templates.save'; value: MappingsTemplates } - | { type: 'fieldForm.update'; value: OnFormUpdateArg } - | { type: 'field.add'; value: Field } - | { type: 'field.remove'; value: string } - | { type: 'field.edit'; value: Field } - | { type: 'field.toggleExpand'; value: { fieldId: string; isExpanded?: boolean } } - | { type: 'documentField.createField'; value?: string } - | { type: 'documentField.editField'; value: string } - | { type: 'documentField.changeStatus'; value: DocumentFieldsStatus } - | { type: 'documentField.changeEditor'; value: FieldsEditor } - | { type: 'fieldsJsonEditor.update'; value: { json: { [key: string]: any }; isValid: boolean } } - | { type: 'search:update'; value: string } - | { type: 'validity:update'; value: boolean }; - -export type Dispatch = (action: Action) => void; - export const addFieldToState = (field: Field, state: State): State => { const updatedFields = { ...state.fields }; const id = getUniqueId(); @@ -277,7 +183,7 @@ export const reducer = (state: State, action: Action): State => { }, documentFields: { ...state.documentFields, - status: 'idle', + ...action.value.documentFields, fieldToAddFieldTo: undefined, fieldToEdit: undefined, }, diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts 
b/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts index 2979015c07455..097d039527950 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/shared_imports.ts @@ -49,4 +49,5 @@ export { export { JsonEditor, OnJsonEditorUpdateHandler, + GlobalFlyout, } from '../../../../../../../src/plugins/es_ui_shared/public'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts similarity index 65% rename from x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts rename to x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts index 5b18af68ed55b..a9f6d2ea03bdf 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/document_fields.ts @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { ReactNode, OptionHTMLAttributes } from 'react'; +import { ReactNode } from 'react'; -import { FieldConfig } from './shared_imports'; -import { PARAMETERS_DEFINITION } from './constants'; +import { GenericObject } from './mappings_editor'; + +import { FieldConfig } from '../shared_imports'; +import { PARAMETERS_DEFINITION } from '../constants'; export interface DataTypeDefinition { label: string; @@ -203,100 +205,7 @@ export interface NormalizedField extends FieldMeta { export type ChildFieldName = 'properties' | 'fields'; -export type FieldsEditor = 'default' | 'json'; - -export type SelectOption = { - value: unknown; - text: T | ReactNode; -} & OptionHTMLAttributes; - -export interface SuperSelectOption { - value: unknown; - inputDisplay?: ReactNode; - dropdownDisplay?: ReactNode; - disabled?: boolean; - 'data-test-subj'?: string; -} - export interface AliasOption { id: string; label: string; } - -export interface IndexSettingsInterface { - analysis?: { - analyzer: { - [key: string]: { - type: string; - tokenizer: string; - char_filter?: string[]; - filter?: string[]; - position_increment_gap?: number; - }; - }; - }; -} - -/** - * When we define the index settings we can skip - * the "index" property and directly add the "analysis". - * ES always returns the settings wrapped under "index". - */ -export type IndexSettings = IndexSettingsInterface | { index: IndexSettingsInterface }; - -export interface ComboBoxOption { - label: string; - value?: unknown; -} - -export interface SearchResult { - display: JSX.Element; - field: NormalizedField; -} - -export interface SearchMetadata { - /** - * Whether or not the search term match some part of the field path. - */ - matchPath: boolean; - /** - * If the search term matches the field type we will give it a higher score. 
- */ - matchType: boolean; - /** - * If the last word of the search terms matches the field name - */ - matchFieldName: boolean; - /** - * If the search term matches the beginning of the path we will give it a higher score - */ - matchStartOfPath: boolean; - /** - * If the last word of the search terms fully matches the field name - */ - fullyMatchFieldName: boolean; - /** - * If the search term exactly matches the field type - */ - fullyMatchType: boolean; - /** - * If the search term matches the full field path - */ - fullyMatchPath: boolean; - /** - * The score of the result that will allow us to sort the list - */ - score: number; - /** - * The JSX with tag wrapping the matched string - */ - display: JSX.Element; - /** - * The field path substring that matches the search - */ - stringMatch: string | null; -} - -export interface GenericObject { - [key: string]: any; -} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts new file mode 100644 index 0000000000000..cce2d550a68c1 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/index.ts @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './mappings_editor'; + +export * from './document_fields'; + +export * from './state'; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts new file mode 100644 index 0000000000000..1ca944024ae2b --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/mappings_editor.ts @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import { ReactNode, OptionHTMLAttributes } from 'react'; + +import { NormalizedField } from './document_fields'; +import { Mappings } from './state'; + +export type OnUpdateHandler = (arg: OnUpdateHandlerArg) => void; + +export interface OnUpdateHandlerArg { + isValid?: boolean; + getData: () => Mappings | undefined; + validate: () => Promise; +} + +export type FieldsEditor = 'default' | 'json'; + +export interface IndexSettingsInterface { + analysis?: { + analyzer: { + [key: string]: { + type: string; + tokenizer: string; + char_filter?: string[]; + filter?: string[]; + position_increment_gap?: number; + }; + }; + }; +} + +/** + * When we define the index settings we can skip + * the "index" property and directly add the "analysis". + * ES always returns the settings wrapped under "index". 
+ */ +export type IndexSettings = IndexSettingsInterface | { index: IndexSettingsInterface }; + +export type SelectOption = { + value: unknown; + text: T | ReactNode; +} & OptionHTMLAttributes; + +export interface ComboBoxOption { + label: string; + value?: unknown; +} + +export interface SuperSelectOption { + value: unknown; + inputDisplay?: ReactNode; + dropdownDisplay?: ReactNode; + disabled?: boolean; + 'data-test-subj'?: string; +} + +export interface SearchResult { + display: JSX.Element; + field: NormalizedField; +} + +export interface SearchMetadata { + /** + * Whether or not the search term match some part of the field path. + */ + matchPath: boolean; + /** + * If the search term matches the field type we will give it a higher score. + */ + matchType: boolean; + /** + * If the last word of the search terms matches the field name + */ + matchFieldName: boolean; + /** + * If the search term matches the beginning of the path we will give it a higher score + */ + matchStartOfPath: boolean; + /** + * If the last word of the search terms fully matches the field name + */ + fullyMatchFieldName: boolean; + /** + * If the search term exactly matches the field type + */ + fullyMatchType: boolean; + /** + * If the search term matches the full field path + */ + fullyMatchPath: boolean; + /** + * The score of the result that will allow us to sort the list + */ + score: number; + /** + * The JSX with tag wrapping the matched string + */ + display: JSX.Element; + /** + * The field path substring that matches the search + */ + stringMatch: string | null; +} + +export interface GenericObject { + [key: string]: any; +} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts new file mode 100644 index 0000000000000..34df70374aa88 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types/state.ts @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { FormHook, OnFormUpdateArg } from '../shared_imports'; +import { Field, NormalizedFields } from './document_fields'; +import { FieldsEditor, SearchResult } from './mappings_editor'; + +export type Mappings = MappingsTemplates & + MappingsConfiguration & { + properties?: MappingsFields; + }; + +export interface MappingsConfiguration { + enabled?: boolean; + throwErrorsForUnmappedFields?: boolean; + date_detection?: boolean; + numeric_detection?: boolean; + dynamic_date_formats?: string[]; + _source?: { + enabled?: boolean; + includes?: string[]; + excludes?: string[]; + }; + _meta?: string; +} + +export interface MappingsTemplates { + dynamic_templates?: DynamicTemplate[]; +} + +export interface DynamicTemplate { + [key: string]: { + mapping: { + [key: string]: any; + }; + match_mapping_type?: string; + match?: string; + unmatch?: string; + match_pattern?: string; + path_match?: string; + path_unmatch?: string; + }; +} + +export interface MappingsFields { + [key: string]: any; +} + +export type DocumentFieldsStatus = 'idle' | 'editingField' | 'creatingField'; + +export interface DocumentFieldsState { + status: DocumentFieldsStatus; + editor: FieldsEditor; + fieldToEdit?: string; + fieldToAddFieldTo?: string; +} + +export interface ConfigurationFormState extends OnFormUpdateArg { + defaultValue: MappingsConfiguration; + submitForm?: FormHook['submit']; +} + +interface TemplatesFormState extends OnFormUpdateArg { + defaultValue: MappingsTemplates; + submitForm?: FormHook['submit']; +} + +export interface State { + isValid: boolean | undefined; + configuration: ConfigurationFormState; + documentFields: DocumentFieldsState; + fields: NormalizedFields; + fieldForm?: OnFormUpdateArg; + fieldsJsonEditor: { + format(): MappingsFields; + isValid: boolean; + }; + search: { + term: string; + result: SearchResult[]; + }; + templates: TemplatesFormState; +} + +export type Action = + | { type: 'editor.replaceMappings'; value: { [key: string]: any } } + | { type: 'configuration.update'; value: Partial } + | { type: 'configuration.save'; value: MappingsConfiguration } + | { type: 'templates.update'; value: Partial } + | { type: 'templates.save'; value: MappingsTemplates } + | { type: 'fieldForm.update'; value: OnFormUpdateArg } + | { type: 'field.add'; value: Field } + | { type: 'field.remove'; value: string } + | { type: 'field.edit'; value: Field } + | { type: 'field.toggleExpand'; value: { fieldId: string; isExpanded?: boolean } } + | { type: 'documentField.createField'; value?: string } + | { type: 'documentField.editField'; value: string } + | { type: 'documentField.changeStatus'; value: DocumentFieldsStatus } + | { type: 'documentField.changeEditor'; value: FieldsEditor } + | { type: 'fieldsJsonEditor.update'; value: { json: { [key: string]: any }; isValid: boolean } } + | { type: 'search:update'; value: string } + | { type: 'validity:update'; value: boolean }; + +export type Dispatch = (action: Action) => void; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx similarity index 53% rename from x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx rename to x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx index ad5056fa73ce1..f1ffd5356c977 100644 --- 
a/x-pack/plugins/index_management/public/application/components/mappings_editor/mappings_state.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/use_state_listener.tsx @@ -3,92 +3,32 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - -import React, { useReducer, useEffect, createContext, useContext, useMemo, useRef } from 'react'; +import { useEffect, useMemo } from 'react'; import { - reducer, + Field, + Mappings, MappingsConfiguration, - MappingsFields, MappingsTemplates, - State, - Dispatch, -} from './reducer'; -import { Field } from './types'; + OnUpdateHandler, +} from './types'; import { normalize, deNormalize, stripUndefinedValues } from './lib'; +import { useMappingsState, useDispatch } from './mappings_state_context'; -type Mappings = MappingsTemplates & - MappingsConfiguration & { - properties?: MappingsFields; - }; - -export interface Types { - Mappings: Mappings; - MappingsConfiguration: MappingsConfiguration; - MappingsFields: MappingsFields; - MappingsTemplates: MappingsTemplates; -} - -export interface OnUpdateHandlerArg { - isValid?: boolean; - getData: () => Mappings | undefined; - validate: () => Promise; -} - -export type OnUpdateHandler = (arg: OnUpdateHandlerArg) => void; - -const StateContext = createContext(undefined); -const DispatchContext = createContext(undefined); - -export interface Props { - children: (params: { state: State }) => React.ReactNode; - value: { +interface Args { + onChange: OnUpdateHandler; + value?: { templates: MappingsTemplates; configuration: MappingsConfiguration; fields: { [key: string]: Field }; }; - onChange: OnUpdateHandler; } -export const MappingsState = React.memo(({ children, onChange, value }: Props) => { - const didMountRef = useRef(false); +export const useMappingsStateListener = ({ onChange, value }: Args) => { + const state = useMappingsState(); + const dispatch = useDispatch(); - const parsedFieldsDefaultValue = useMemo(() => normalize(value.fields), [value.fields]); - - const initialState: State = { - isValid: true, - configuration: { - defaultValue: value.configuration, - data: { - raw: value.configuration, - format: () => value.configuration, - }, - validate: () => Promise.resolve(true), - }, - templates: { - defaultValue: value.templates, - data: { - raw: value.templates, - format: () => value.templates, - }, - validate: () => Promise.resolve(true), - }, - fields: parsedFieldsDefaultValue, - documentFields: { - status: parsedFieldsDefaultValue.rootLevelFields.length === 0 ? 'creatingField' : 'idle', - editor: 'default', - }, - fieldsJsonEditor: { - format: () => ({}), - isValid: true, - }, - search: { - term: '', - result: [], - }, - }; - - const [state, dispatch] = useReducer(reducer, initialState); + const parsedFieldsDefaultValue = useMemo(() => normalize(value?.fields), [value?.fields]); useEffect(() => { // If we are creating a new field, but haven't entered any name @@ -158,46 +98,28 @@ export const MappingsState = React.memo(({ children, onChange, value }: Props) = }, isValid: state.isValid, }); - }, [state, onChange]); + }, [state, onChange, dispatch]); useEffect(() => { /** * If the value has changed that probably means that we have loaded * new data from JSON. We need to update our state with the new mappings. 
*/ - if (didMountRef.current) { - dispatch({ - type: 'editor.replaceMappings', - value: { - configuration: value.configuration, - templates: value.templates, - fields: parsedFieldsDefaultValue, - }, - }); - } else { - didMountRef.current = true; + if (value === undefined) { + return; } - }, [value, parsedFieldsDefaultValue]); - - return ( - - {children({ state })} - - ); -}); - -export const useMappingsState = () => { - const ctx = useContext(StateContext); - if (ctx === undefined) { - throw new Error('useMappingsState must be used within a '); - } - return ctx; -}; -export const useDispatch = () => { - const ctx = useContext(DispatchContext); - if (ctx === undefined) { - throw new Error('useDispatch must be used within a '); - } - return ctx; + dispatch({ + type: 'editor.replaceMappings', + value: { + configuration: value.configuration, + templates: value.templates, + fields: parsedFieldsDefaultValue, + documentFields: { + status: parsedFieldsDefaultValue.rootLevelFields.length === 0 ? 'creatingField' : 'idle', + editor: 'default', + }, + }, + }); + }, [value, parsedFieldsDefaultValue, dispatch]); }; diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx index df0cc791384fe..ae831f4acf7ee 100644 --- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx +++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_components.tsx @@ -39,7 +39,7 @@ const i18nTexts = { ), }; -export const StepComponents = ({ defaultValue = [], onChange, esDocsBase }: Props) => { +export const StepComponents = ({ defaultValue, onChange, esDocsBase }: Props) => { const [state, setState] = useState<{ isLoadingComponents: boolean; components: ComponentTemplateListItem[]; diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx index f3d05ac38108a..fcc9795617ebb 100644 --- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx +++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import React, { useEffect } from 'react'; +import React, { useEffect, useCallback } from 'react'; import { EuiFlexGroup, EuiFlexItem, @@ -153,25 +153,18 @@ export const StepLogistics: React.FunctionComponent = React.memo( serializer: formSerializer, deserializer: formDeserializer, }); + const { subscribe, submit, isSubmitted, isValid: isFormValid, getErrors: getFormErrors } = form; /** * When the consumer call validate() on this step, we submit the form so it enters the "isSubmitted" state * and we can display the form errors on top of the forms if there are any. 
*/ - const validate = async () => { - return (await form.submit()).isValid; - }; + const validate = useCallback(async () => { + return (await submit()).isValid; + }, [submit]); useEffect(() => { - onChange({ - isValid: form.isValid, - validate, - getData: form.getFormData, - }); - }, [form.isValid, onChange]); // eslint-disable-line react-hooks/exhaustive-deps - - useEffect(() => { - const subscription = form.subscribe(({ data, isValid }) => { + const subscription = subscribe(({ data, isValid }) => { onChange({ isValid, validate, @@ -179,7 +172,7 @@ export const StepLogistics: React.FunctionComponent = React.memo( }); }); return subscription.unsubscribe; - }, [onChange]); // eslint-disable-line react-hooks/exhaustive-deps + }, [onChange, validate, subscribe]); const { name, indexPatterns, dataStream, order, priority, version } = getFieldsMeta( documentationService.getEsDocsBase() @@ -204,7 +197,7 @@ export const StepLogistics: React.FunctionComponent = React.memo( @@ -220,8 +213,8 @@ export const StepLogistics: React.FunctionComponent = React.memo(
{/* Name */} diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_review.tsx b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_review.tsx index 0f4b9de4f6cfa..1b4f19dda99f7 100644 --- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_review.tsx +++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_review.tsx @@ -24,6 +24,7 @@ import { serializers } from '../../../../shared_imports'; import { serializeLegacyTemplate, serializeTemplate } from '../../../../../common/lib'; import { TemplateDeserialized, getTemplateParameter } from '../../../../../common'; +import { SimulateTemplate } from '../../index_templates'; import { WizardSection } from '../template_form'; const { stripEmptyFields } = serializers; @@ -56,6 +57,27 @@ interface Props { navigateToStep: (stepId: WizardSection) => void; } +const PreviewTab = ({ template }: { template: { [key: string]: any } }) => { + return ( +
+ + + +

+ +

+
+ + + + +
+ ); +}; + export const StepReview: React.FunctionComponent = React.memo( ({ template, navigateToStep }) => { const { @@ -286,6 +308,33 @@ export const StepReview: React.FunctionComponent = React.memo( ); }; + const tabs = [ + { + id: 'summary', + name: i18n.translate('xpack.idxMgmt.templateForm.stepReview.summaryTabTitle', { + defaultMessage: 'Summary', + }), + content: , + }, + { + id: 'request', + name: i18n.translate('xpack.idxMgmt.templateForm.stepReview.requestTabTitle', { + defaultMessage: 'Request', + }), + content: , + }, + ]; + + if (!isLegacy) { + tabs.splice(1, 0, { + id: 'preview', + name: i18n.translate('xpack.idxMgmt.templateForm.stepReview.previewTabTitle', { + defaultMessage: 'Preview', + }), + content: , + }); + } + return (
@@ -331,25 +380,7 @@ export const StepReview: React.FunctionComponent = React.memo( ) : null} - , - }, - { - id: 'request', - name: i18n.translate('xpack.idxMgmt.templateForm.stepReview.requestTabTitle', { - defaultMessage: 'Request', - }), - content: , - }, - ]} - /> +
); } diff --git a/x-pack/plugins/index_management/public/application/components/template_form/template_form.tsx b/x-pack/plugins/index_management/public/application/components/template_form/template_form.tsx index f5c9be9292cd0..fb0ba0b68fa6c 100644 --- a/x-pack/plugins/index_management/public/application/components/template_form/template_form.tsx +++ b/x-pack/plugins/index_management/public/application/components/template_form/template_form.tsx @@ -3,14 +3,19 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import React, { useCallback } from 'react'; +import React, { useState, useCallback } from 'react'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; -import { EuiSpacer } from '@elastic/eui'; +import { EuiSpacer, EuiButton } from '@elastic/eui'; import { TemplateDeserialized } from '../../../../common'; -import { serializers, Forms } from '../../../shared_imports'; +import { serializers, Forms, GlobalFlyout } from '../../../shared_imports'; import { SectionError } from '../section_error'; +import { + SimulateTemplateFlyoutContent, + SimulateTemplateProps, + simulateTemplateFlyoutProps, +} from '../index_templates'; import { StepLogisticsContainer, StepComponentContainer, StepReviewContainer } from './steps'; import { CommonWizardSteps, @@ -22,8 +27,10 @@ import { documentationService } from '../../services/documentation'; const { stripEmptyFields } = serializers; const { FormWizard, FormWizardStep } = Forms; +const { useGlobalFlyout } = GlobalFlyout; interface Props { + title: string | JSX.Element; onSave: (template: TemplateDeserialized) => void; clearSaveError: () => void; isSaving: boolean; @@ -80,6 +87,7 @@ const wizardSections: { [id: string]: { id: WizardSection; label: string } } = { }; export const TemplateForm = ({ + title, defaultValue, isEditing, isSaving, @@ -88,6 +96,9 @@ export const TemplateForm = ({ clearSaveError, onSave, }: Props) => { + const [wizardContent, setWizardContent] = useState | null>(null); + const { addContent: addContentToGlobalFlyout, closeFlyout } = useGlobalFlyout(); + const indexTemplate = defaultValue ?? 
{ name: '', indexPatterns: [], @@ -189,6 +200,10 @@ export const TemplateForm = ({ [] ); + const onWizardContentChange = useCallback((content: Forms.Content) => { + setWizardContent(content); + }, []); + const onSaveTemplate = useCallback( async (wizardData: WizardContent) => { const template = buildTemplateObject(indexTemplate)(wizardData); @@ -206,44 +221,101 @@ export const TemplateForm = ({ [indexTemplate, buildTemplateObject, onSave, clearSaveError] ); + const getSimulateTemplate = useCallback(async () => { + if (!wizardContent) { + return; + } + const isValid = await wizardContent.validate(); + if (!isValid) { + return; + } + const wizardData = wizardContent.getData(); + const template = buildTemplateObject(indexTemplate)(wizardData); + return template; + }, [buildTemplateObject, indexTemplate, wizardContent]); + + const showPreviewFlyout = () => { + addContentToGlobalFlyout({ + id: 'simulateTemplate', + Component: SimulateTemplateFlyoutContent, + props: { + getTemplate: getSimulateTemplate, + onClose: closeFlyout, + }, + flyoutProps: simulateTemplateFlyoutProps, + }); + }; + + const getRightContentWizardNav = (stepId: WizardSection) => { + if (isLegacy) { + return null; + } + + // Don't show "Preview template" button on logistics and review steps + if (stepId === 'logistics' || stepId === 'review') { + return null; + } + + return ( + + + + ); + }; + return ( - - defaultValue={wizardDefaultValue} - onSave={onSaveTemplate} - isEditing={isEditing} - isSaving={isSaving} - apiError={apiError} - texts={i18nTexts} - > - - - + <> + {/* Form header */} + {title} - {indexTemplate._kbnMeta.isLegacy !== true && ( - - + + + + defaultValue={wizardDefaultValue} + onSave={onSaveTemplate} + isEditing={isEditing} + isSaving={isSaving} + apiError={apiError} + texts={i18nTexts} + onChange={onWizardContentChange} + rightContentNav={getRightContentWizardNav} + > + + - )} - - - + {indexTemplate._kbnMeta.isLegacy !== true && ( + + + + )} - - - + + + + + + + - - - + + + - - - - + + + + + ); }; diff --git a/x-pack/plugins/index_management/public/application/index.tsx b/x-pack/plugins/index_management/public/application/index.tsx index ebc29ac86a17f..f881c2e01cefc 100644 --- a/x-pack/plugins/index_management/public/application/index.tsx +++ b/x-pack/plugins/index_management/public/application/index.tsx @@ -11,11 +11,14 @@ import { render, unmountComponentAtNode } from 'react-dom'; import { CoreStart } from '../../../../../src/core/public'; import { API_BASE_PATH } from '../../common'; +import { GlobalFlyout } from '../shared_imports'; import { AppContextProvider, AppDependencies } from './app_context'; import { App } from './app'; import { indexManagementStore } from './store'; -import { ComponentTemplatesProvider } from './components'; +import { ComponentTemplatesProvider, MappingsEditorProvider } from './components'; + +const { GlobalFlyoutProvider } = GlobalFlyout; export const renderApp = ( elem: HTMLElement | null, @@ -43,9 +46,13 @@ export const renderApp = ( - - - + + + + + + + , diff --git a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/index.ts b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/index.ts index 08ebda2b5e437..11a86e78be99c 100644 --- a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/index.ts +++ b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/index.ts @@ -5,3 +5,4 @@ */ 
export { TabSummary } from './tab_summary'; +export { TabPreview } from './tab_preview'; diff --git a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/tab_preview.tsx b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/tab_preview.tsx new file mode 100644 index 0000000000000..ec52bcbab3b0b --- /dev/null +++ b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/tabs/tab_preview.tsx @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { EuiText, EuiSpacer } from '@elastic/eui'; +import { TemplateDeserialized } from '../../../../../../../common'; +import { SimulateTemplate } from '../../../../../components/index_templates'; + +interface Props { + templateDetails: TemplateDeserialized; +} + +export const TabPreview = ({ templateDetails }: Props) => { + return ( +
+ +

+ +

+
+ + + + +
+ ); +}; diff --git a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details.tsx b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details.tsx index faeca2f2487a8..c03f64880a700 100644 --- a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details.tsx +++ b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details.tsx @@ -15,8 +15,6 @@ export const TemplateDetails = (props: Props) => { onClose={props.onClose} data-test-subj="templateDetails" aria-labelledby="templateDetailsFlyoutTitle" - size="m" - maxWidth={500} > diff --git a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details_content.tsx b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details_content.tsx index 5b726013a1d92..5bacffc4c2404 100644 --- a/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details_content.tsx +++ b/x-pack/plugins/index_management/public/application/sections/home/template_list/template_details/template_details_content.tsx @@ -29,6 +29,7 @@ import { UIM_TEMPLATE_DETAIL_PANEL_SUMMARY_TAB, UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB, UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB, + UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB, } from '../../../../../../common/constants'; import { SendRequestResponse } from '../../../../../shared_imports'; import { TemplateDeleteModal, SectionLoading, SectionError, Error } from '../../../../components'; @@ -37,12 +38,13 @@ import { decodePathFromReactRouter } from '../../../../services/routing'; import { useServices } from '../../../../app_context'; import { TabAliases, TabMappings, TabSettings } from '../../../../components/shared'; import { TemplateTypeIndicator } from '../components'; -import { TabSummary } from './tabs'; +import { TabSummary, TabPreview } from './tabs'; const SUMMARY_TAB_ID = 'summary'; const MAPPINGS_TAB_ID = 'mappings'; const ALIASES_TAB_ID = 'aliases'; const SETTINGS_TAB_ID = 'settings'; +const PREVIEW_TAB_ID = 'preview'; const TABS = [ { @@ -69,6 +71,12 @@ const TABS = [ defaultMessage: 'Aliases', }), }, + { + id: PREVIEW_TAB_ID, + name: i18n.translate('xpack.idxMgmt.templateDetails.previewTabTitle', { + defaultMessage: 'Preview', + }), + }, ]; const tabToUiMetricMap: { [key: string]: string } = { @@ -76,6 +84,7 @@ const tabToUiMetricMap: { [key: string]: string } = { [SETTINGS_TAB_ID]: UIM_TEMPLATE_DETAIL_PANEL_SETTINGS_TAB, [MAPPINGS_TAB_ID]: UIM_TEMPLATE_DETAIL_PANEL_MAPPINGS_TAB, [ALIASES_TAB_ID]: UIM_TEMPLATE_DETAIL_PANEL_ALIASES_TAB, + [PREVIEW_TAB_ID]: UIM_TEMPLATE_DETAIL_PANEL_PREVIEW_TAB, }; export interface Props { @@ -161,6 +170,7 @@ export const TemplateDetailsContent = ({ [SETTINGS_TAB_ID]: , [MAPPINGS_TAB_ID]: , [ALIASES_TAB_ID]: , + [PREVIEW_TAB_ID]: , }; const tabContent = tabToComponentMap[activeTab]; @@ -191,7 +201,13 @@ export const TemplateDetailsContent = ({ {managedTemplateCallout} - {TABS.map((tab) => ( + {TABS.filter((tab) => { + // Legacy index templates don't have the "simulate" template API + if (isLegacy && tab.id === PREVIEW_TAB_ID) { + return false; + } + return true; + }).map((tab) => ( { uiMetricService.trackMetric('click', tabToUiMetricMap[tab.id]); diff --git 
a/x-pack/plugins/index_management/public/application/sections/template_clone/template_clone.tsx b/x-pack/plugins/index_management/public/application/sections/template_clone/template_clone.tsx index 82835c56a3877..2aaecbd64ee28 100644 --- a/x-pack/plugins/index_management/public/application/sections/template_clone/template_clone.tsx +++ b/x-pack/plugins/index_management/public/application/sections/template_clone/template_clone.tsx @@ -6,7 +6,7 @@ import React, { useEffect, useState } from 'react'; import { RouteComponentProps } from 'react-router-dom'; import { FormattedMessage } from '@kbn/i18n/react'; -import { EuiPageBody, EuiPageContent, EuiSpacer, EuiTitle } from '@elastic/eui'; +import { EuiPageBody, EuiPageContent, EuiTitle } from '@elastic/eui'; import { TemplateDeserialized } from '../../../../common'; import { TemplateForm, SectionLoading, SectionError, Error } from '../../components'; @@ -94,30 +94,30 @@ export const TemplateClone: React.FunctionComponent +

+ +

+ + } defaultValue={templateData} onSave={onSave} isSaving={isSaving} saveError={saveError} clearSaveError={clearSaveError} + isLegacy={isLegacy} /> ); } return ( - - -

- -

-
- - {content} -
+ {content}
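The clone page above now drops its page chrome and hands `TemplateForm` a translated `title` node plus the new `isLegacy` flag alongside its existing props. A rough sketch of the resulting call, with illustrative prop types and an illustrative i18n id (neither is taken from the original source):

```tsx
// Rough sketch of the TemplateForm call used by the clone/edit pages above.
// Prop names come from the fragments visible in this diff; the i18n id, message
// text, and prop types below are illustrative, not the originals.
import React from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import { TemplateDeserialized } from '../../../../common';
import { TemplateForm } from '../../components';

interface SketchProps {
  templateData: TemplateDeserialized;
  isLegacy?: boolean;
  isSaving: boolean;
  saveError: any;
  onSave: (template: TemplateDeserialized) => void;
  clearSaveError: () => void;
}

export const CloneTemplateFormSketch = (props: SketchProps) => (
  <TemplateForm
    title={
      <FormattedMessage
        id="xpack.idxMgmt.cloneTemplate.pageTitle" // illustrative id
        defaultMessage="Clone template"
      />
    }
    defaultValue={props.templateData}
    onSave={props.onSave}
    isSaving={props.isSaving}
    saveError={props.saveError}
    clearSaveError={props.clearSaveError}
    isLegacy={props.isLegacy}
  />
);
```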
); }; diff --git a/x-pack/plugins/index_management/public/application/sections/template_create/template_create.tsx b/x-pack/plugins/index_management/public/application/sections/template_create/template_create.tsx index fb82f52968eb4..691d2598d56d9 100644 --- a/x-pack/plugins/index_management/public/application/sections/template_create/template_create.tsx +++ b/x-pack/plugins/index_management/public/application/sections/template_create/template_create.tsx @@ -6,7 +6,7 @@ import React, { useEffect, useState } from 'react'; import { RouteComponentProps } from 'react-router-dom'; import { FormattedMessage } from '@kbn/i18n/react'; -import { EuiPageBody, EuiPageContent, EuiSpacer, EuiTitle } from '@elastic/eui'; +import { EuiPageBody, EuiPageContent, EuiTitle } from '@elastic/eui'; import { useLocation } from 'react-router-dom'; import { parse } from 'query-string'; @@ -51,23 +51,24 @@ export const TemplateCreate: React.FunctionComponent = ({ h return ( - -

- {isLegacy ? ( - - ) : ( - - )} -

-
- +

+ {isLegacy ? ( + + ) : ( + + )} +

+ + } onSave={onSave} isSaving={isSaving} saveError={saveError} diff --git a/x-pack/plugins/index_management/public/application/sections/template_edit/template_edit.tsx b/x-pack/plugins/index_management/public/application/sections/template_edit/template_edit.tsx index 29fd2e02120fc..6bdcd03fa5ca4 100644 --- a/x-pack/plugins/index_management/public/application/sections/template_edit/template_edit.tsx +++ b/x-pack/plugins/index_management/public/application/sections/template_edit/template_edit.tsx @@ -133,12 +133,24 @@ export const TemplateEdit: React.FunctionComponent )} +

+ +

+ + } defaultValue={template} onSave={onSave} isSaving={isSaving} saveError={saveError} clearSaveError={clearSaveError} isEditing={true} + isLegacy={isLegacy} /> ); @@ -147,19 +159,7 @@ export const TemplateEdit: React.FunctionComponent - - -

- -

-
- - {content} -
+ {content}
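The Preview tab and `isLegacy` plumbing above hinge on a template simulate round trip added in the hunks that follow: `simulateIndexTemplate` posts the template to the new `/index_templates/simulate` route, and the server proxies Elasticsearch's `POST /_index_template/_simulate`. A minimal caller sketch, assuming `sendRequest`'s usual `{ data, error }` response shape; the wrapper function and import path are illustrative:

```ts
// Sketch only: not the actual TabPreview implementation. `previewTemplate` is an
// illustrative wrapper; `simulateIndexTemplate` is the helper added below in
// public/application/services/api.ts and re-exported from the services barrel
// (adjust the relative import path to wherever the caller lives).
import { simulateIndexTemplate } from '../services';

export async function previewTemplate(template: { [key: string]: any }) {
  // Client: POST <API_BASE_PATH>/index_templates/simulate (see register_simulate_route below)
  // Server: POST /_index_template/_simulate via the new dataManagement.simulateTemplate extension
  const { data, error } = await simulateIndexTemplate(template);

  if (error) {
    // Let the surrounding UI decide how to surface the failure.
    throw error;
  }

  // `data` is the composed preview (settings, mappings, aliases) returned by Elasticsearch.
  return data;
}
```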
); }; diff --git a/x-pack/plugins/index_management/public/application/services/api.ts b/x-pack/plugins/index_management/public/application/services/api.ts index d7874ec2dcf32..546a0115ee4a9 100644 --- a/x-pack/plugins/index_management/public/application/services/api.ts +++ b/x-pack/plugins/index_management/public/application/services/api.ts @@ -30,6 +30,7 @@ import { UIM_TEMPLATE_CREATE, UIM_TEMPLATE_UPDATE, UIM_TEMPLATE_CLONE, + UIM_TEMPLATE_SIMULATE, } from '../../../common/constants'; import { TemplateDeserialized, TemplateListItem, DataStream } from '../../../common'; import { IndexMgmtMetricsType } from '../../types'; @@ -286,3 +287,14 @@ export async function updateTemplate(template: TemplateDeserialized) { return result; } + +export function simulateIndexTemplate(template: { [key: string]: any }) { + return sendRequest({ + path: `${API_BASE_PATH}/index_templates/simulate`, + method: 'post', + body: JSON.stringify(template), + }).then((result) => { + uiMetricService.trackMetric('count', UIM_TEMPLATE_SIMULATE); + return result; + }); +} diff --git a/x-pack/plugins/index_management/public/application/services/documentation.ts b/x-pack/plugins/index_management/public/application/services/documentation.ts index 972b4f4b25680..afc9c76f1afbe 100644 --- a/x-pack/plugins/index_management/public/application/services/documentation.ts +++ b/x-pack/plugins/index_management/public/application/services/documentation.ts @@ -40,8 +40,10 @@ class DocumentationService { return `${this.esDocsBase}/data-streams.html`; } - public getTemplatesDocumentationLink() { - return `${this.esDocsBase}/indices-templates.html`; + public getTemplatesDocumentationLink(isLegacy = false) { + return isLegacy + ? `${this.esDocsBase}/indices-templates-v1.html` + : `${this.esDocsBase}/indices-templates.html`; } public getIdxMgmtDocumentationLink() { diff --git a/x-pack/plugins/index_management/public/application/services/index.ts b/x-pack/plugins/index_management/public/application/services/index.ts index 2334d32adf131..a78e0bac14ae1 100644 --- a/x-pack/plugins/index_management/public/application/services/index.ts +++ b/x-pack/plugins/index_management/public/application/services/index.ts @@ -22,6 +22,7 @@ export { loadIndexMapping, loadIndexData, useLoadIndexTemplates, + simulateIndexTemplate, } from './api'; export { healthToColor } from './health_to_color'; export { sortTable } from './sort_table'; diff --git a/x-pack/plugins/index_management/public/shared_imports.ts b/x-pack/plugins/index_management/public/shared_imports.ts index 3f7fcf424f1f0..16dcab18c3caf 100644 --- a/x-pack/plugins/index_management/public/shared_imports.ts +++ b/x-pack/plugins/index_management/public/shared_imports.ts @@ -12,6 +12,7 @@ export { useRequest, Forms, extractQueryParams, + GlobalFlyout, } from '../../../../src/plugins/es_ui_shared/public/'; export { diff --git a/x-pack/plugins/index_management/server/client/elasticsearch.ts b/x-pack/plugins/index_management/server/client/elasticsearch.ts index 9f8bce241ae69..ed5ede07479ca 100644 --- a/x-pack/plugins/index_management/server/client/elasticsearch.ts +++ b/x-pack/plugins/index_management/server/client/elasticsearch.ts @@ -182,4 +182,14 @@ export const elasticsearchJsPlugin = (Client: any, config: any, components: any) ], method: 'HEAD', }); + + dataManagement.simulateTemplate = ca({ + urls: [ + { + fmt: '/_index_template/_simulate', + }, + ], + needBody: true, + method: 'POST', + }); }; diff --git a/x-pack/plugins/index_management/server/routes/api/templates/register_simulate_route.ts 
b/x-pack/plugins/index_management/server/routes/api/templates/register_simulate_route.ts new file mode 100644 index 0000000000000..9d078e135fd52 --- /dev/null +++ b/x-pack/plugins/index_management/server/routes/api/templates/register_simulate_route.ts @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { schema, TypeOf } from '@kbn/config-schema'; + +import { RouteDependencies } from '../../../types'; +import { addBasePath } from '../index'; + +const bodySchema = schema.object({}, { unknowns: 'allow' }); + +export function registerSimulateRoute({ router, license, lib }: RouteDependencies) { + router.post( + { + path: addBasePath('/index_templates/simulate'), + validate: { body: bodySchema }, + }, + license.guardApiRoute(async (ctx, req, res) => { + const { callAsCurrentUser } = ctx.dataManagement!.client; + const template = req.body as TypeOf; + + try { + const templatePreview = await callAsCurrentUser('dataManagement.simulateTemplate', { + body: template, + }); + + return res.ok({ body: templatePreview }); + } catch (e) { + if (lib.isEsError(e)) { + return res.customError({ + statusCode: e.statusCode, + body: e, + }); + } + // Case: default + return res.internalError({ body: e }); + } + }) + ); +} diff --git a/x-pack/plugins/index_management/server/routes/api/templates/register_template_routes.ts b/x-pack/plugins/index_management/server/routes/api/templates/register_template_routes.ts index 2b657346a2f82..e25f2abdfee78 100644 --- a/x-pack/plugins/index_management/server/routes/api/templates/register_template_routes.ts +++ b/x-pack/plugins/index_management/server/routes/api/templates/register_template_routes.ts @@ -10,6 +10,7 @@ import { registerGetAllRoute, registerGetOneRoute } from './register_get_routes' import { registerDeleteRoute } from './register_delete_route'; import { registerCreateRoute } from './register_create_route'; import { registerUpdateRoute } from './register_update_route'; +import { registerSimulateRoute } from './register_simulate_route'; export function registerTemplateRoutes(dependencies: RouteDependencies) { registerGetAllRoute(dependencies); @@ -17,4 +18,5 @@ export function registerTemplateRoutes(dependencies: RouteDependencies) { registerDeleteRoute(dependencies); registerCreateRoute(dependencies); registerUpdateRoute(dependencies); + registerSimulateRoute(dependencies); } diff --git a/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json b/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json index 4b10dab5d1ae5..e16edac5ddb7a 100644 --- a/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json +++ b/x-pack/plugins/ingest_manager/common/openapi/spec_oas3.json @@ -4203,6 +4203,7 @@ "FAILED", "STOPPING", "STOPPED", + "DEGRADED", "DATA_DUMP", "ACKNOWLEDGED", "UNKNOWN" diff --git a/x-pack/plugins/ingest_manager/common/types/index.ts b/x-pack/plugins/ingest_manager/common/types/index.ts index d7edc04a35799..7acef263f973a 100644 --- a/x-pack/plugins/ingest_manager/common/types/index.ts +++ b/x-pack/plugins/ingest_manager/common/types/index.ts @@ -22,8 +22,7 @@ export interface IngestManagerConfigType { host?: string; ca_sha256?: string; }; - agentConfigRollupRateLimitIntervalMs: number; - agentConfigRollupRateLimitRequestPerInterval: number; + agentConfigRolloutConcurrency: number; }; } diff --git 
a/x-pack/plugins/ingest_manager/common/types/models/agent.ts b/x-pack/plugins/ingest_manager/common/types/models/agent.ts index d3789c58a2c22..f31d33e73c76f 100644 --- a/x-pack/plugins/ingest_manager/common/types/models/agent.ts +++ b/x-pack/plugins/ingest_manager/common/types/models/agent.ts @@ -53,6 +53,7 @@ export interface NewAgentEvent { | 'FAILED' | 'STOPPING' | 'STOPPED' + | 'DEGRADED' // Action results | 'DATA_DUMP' // Actions diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/alpha_flyout.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/alpha_flyout.tsx index 03c70f71529c9..110d6de02c12b 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/alpha_flyout.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/alpha_flyout.tsx @@ -73,7 +73,7 @@ export const AlphaFlyout: React.FunctionComponent = ({ onClose }) => { - + = ({ onClose }) => { - + = ({ options={[ { id: 'logs', - label: i18n.translate( - 'xpack.ingestManager.agentConfigForm.monitoringLogsFieldLabel', - { defaultMessage: 'Collect agent logs' } + label: ( + <> + {' '} + + ), }, { id: 'metrics', - label: i18n.translate( - 'xpack.ingestManager.agentConfigForm.monitoringMetricsFieldLabel', - { defaultMessage: 'Collect agent metrics' } + label: ( + <> + {' '} + + ), }, ]} @@ -315,16 +347,14 @@ export const AgentConfigForm: React.FunctionComponent = ({ {!isEditing ? ( - - + } > - = ({ )} position="right" type="iInCircle" + color="subdued" /> } diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx index 6cf60fe1dc507..9c2d09b02665f 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/components/config_yaml_flyout.tsx @@ -72,7 +72,7 @@ export const ConfigYamlFlyout = memo<{ configId: string; onClose: () => void }>( - + = ({ /> + - +

+ +

); @@ -95,7 +99,7 @@ export const CreateAgentConfigFlyout: React.FunctionComponent = ({ - onClose()} flush="left"> + onClose()} flush="left"> ), + DEGRADED: ( + + + + ), DATA_DUMP: ( = ({ - + = ({ onCl - + = ({ - + - + diff --git a/x-pack/plugins/ingest_manager/server/index.ts b/x-pack/plugins/ingest_manager/server/index.ts index 6c72218abc531..40e0153a26581 100644 --- a/x-pack/plugins/ingest_manager/server/index.ts +++ b/x-pack/plugins/ingest_manager/server/index.ts @@ -35,8 +35,7 @@ export const config = { host: schema.maybe(schema.string()), ca_sha256: schema.maybe(schema.string()), }), - agentConfigRollupRateLimitIntervalMs: schema.number({ defaultValue: 5000 }), - agentConfigRollupRateLimitRequestPerInterval: schema.number({ defaultValue: 50 }), + agentConfigRolloutConcurrency: schema.number({ defaultValue: 10 }), }), }), }; diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts index e485fad09ba99..6cfe3d5b76266 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts @@ -48,7 +48,7 @@ export const getAgentHandler: RequestHandler): [Rx.Subscription, jest.Mock] { + const spy = jest.fn(); + const observer = o.subscribe(spy); + return [observer, spy]; +} + +describe('createSubscriberConcurrencyLimiter', () => { + it('should not publish to more than n concurrent subscriber', async () => { + const subject = new Rx.Subject(); + const sharedObservable = subject.pipe(share()); + + const limiter = createSubscriberConcurrencyLimiter(2); + + const [observer1, spy1] = createSpyObserver(sharedObservable.pipe(limiter())); + const [observer2, spy2] = createSpyObserver(sharedObservable.pipe(limiter())); + const [observer3, spy3] = createSpyObserver(sharedObservable.pipe(limiter())); + const [observer4, spy4] = createSpyObserver(sharedObservable.pipe(limiter())); + subject.next('test1'); + + expect(spy1).toBeCalled(); + expect(spy2).toBeCalled(); + expect(spy3).not.toBeCalled(); + expect(spy4).not.toBeCalled(); + + observer1.unsubscribe(); + expect(spy3).toBeCalled(); + expect(spy4).not.toBeCalled(); + + observer2.unsubscribe(); + expect(spy4).toBeCalled(); + + observer3.unsubscribe(); + observer4.unsubscribe(); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts index a806169019a1e..dc0ed35207e46 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts @@ -43,34 +43,23 @@ export const toPromiseAbortable = ( } }); -export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerInterval: number) { - function createCurrentInterval() { - return { - startedAt: Rx.asyncScheduler.now(), - numRequests: 0, - }; - } - - let currentInterval: { startedAt: number; numRequests: number } = createCurrentInterval(); +export function createSubscriberConcurrencyLimiter(maxConcurrency: number) { let observers: Array<[Rx.Subscriber, any]> = []; - let timerSubscription: Rx.Subscription | undefined; + let activeObservers: Array> = []; - function createTimeout() { - if (timerSubscription) { + function processNext() { + if (activeObservers.length >= maxConcurrency) { return; } - timerSubscription = Rx.asyncScheduler.schedule(() => { - timerSubscription = undefined; - currentInterval = createCurrentInterval(); - 
for (const [waitingObserver, value] of observers) { - if (currentInterval.numRequests >= ratelimitRequestPerInterval) { - createTimeout(); - continue; - } - currentInterval.numRequests++; - waitingObserver.next(value); - } - }, ratelimitIntervalMs); + const observerValuePair = observers.shift(); + + if (!observerValuePair) { + return; + } + + const [observer, value] = observerValuePair; + activeObservers.push(observer); + observer.next(value); } return function limit(): Rx.MonoTypeOperatorFunction { @@ -78,14 +67,8 @@ export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerIn new Rx.Observable((observer) => { const subscription = observable.subscribe({ next(value) { - if (currentInterval.numRequests < ratelimitRequestPerInterval) { - currentInterval.numRequests++; - observer.next(value); - return; - } - observers = [...observers, [observer, value]]; - createTimeout(); + processNext(); }, error(err) { observer.error(err); @@ -96,8 +79,10 @@ export function createLimiter(ratelimitIntervalMs: number, ratelimitRequestPerIn }); return () => { + activeObservers = activeObservers.filter((o) => o !== observer); observers = observers.filter((o) => o[0] !== observer); subscription.unsubscribe(); + processNext(); }; }); }; diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts index 69d61171b21fc..63f22b82611c2 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state.ts @@ -13,9 +13,11 @@ import { AGENT_UPDATE_LAST_CHECKIN_INTERVAL_MS } from '../../../constants'; function agentCheckinStateFactory() { const agentConnected = agentCheckinStateConnectedAgentsFactory(); - const newActions = agentCheckinStateNewActionsFactory(); + let newActions: ReturnType; let interval: NodeJS.Timeout; + function start() { + newActions = agentCheckinStateNewActionsFactory(); interval = setInterval(async () => { try { await agentConnected.updateLastCheckinAt(); @@ -31,15 +33,20 @@ function agentCheckinStateFactory() { } } return { - subscribeToNewActions: ( + subscribeToNewActions: async ( soClient: SavedObjectsClientContract, agent: Agent, options?: { signal: AbortSignal } - ) => - agentConnected.wrapPromise( + ) => { + if (!newActions) { + throw new Error('Agent checkin state not initialized'); + } + + return agentConnected.wrapPromise( agent.id, newActions.subscribeToNewActions(soClient, agent, options) - ), + ); + }, start, stop, }; diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts index 5ceb774a1946c..53270afe453c4 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts @@ -28,7 +28,7 @@ import * as APIKeysService from '../../api_keys'; import { AGENT_SAVED_OBJECT_TYPE, AGENT_UPDATE_ACTIONS_INTERVAL_MS } from '../../../constants'; import { createAgentAction, getNewActionsSince } from '../actions'; import { appContextService } from '../../app_context'; -import { toPromiseAbortable, AbortError, createLimiter } from './rxjs_utils'; +import { toPromiseAbortable, AbortError, createSubscriberConcurrencyLimiter } from './rxjs_utils'; function getInternalUserSOClient() { const fakeRequest = ({ @@ -134,9 +134,8 @@ export function 
agentCheckinStateNewActionsFactory() { const agentConfigs$ = new Map>(); const newActions$ = createNewActionsSharedObservable(); // Rx operators - const rateLimiter = createLimiter( - appContextService.getConfig()?.fleet.agentConfigRollupRateLimitIntervalMs || 5000, - appContextService.getConfig()?.fleet.agentConfigRollupRateLimitRequestPerInterval || 50 + const concurrencyLimiter = createSubscriberConcurrencyLimiter( + appContextService.getConfig()?.fleet.agentConfigRolloutConcurrency ?? 10 ); async function subscribeToNewActions( @@ -155,10 +154,11 @@ export function agentCheckinStateNewActionsFactory() { if (!agentConfig$) { throw new Error(`Invalid state no observable for config ${configId}`); } + const stream$ = agentConfig$.pipe( timeout(appContextService.getConfig()?.fleet.pollingRequestTimeout || 0), filter((config) => shouldCreateAgentConfigAction(agent, config)), - rateLimiter(), + concurrencyLimiter(), mergeMap((config) => createAgentActionFromConfig(soClient, agent, config)), merge(newActions$), mergeMap(async (data) => { diff --git a/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.test.ts index 635dce93f0027..54b40400bb4e7 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.test.ts @@ -84,6 +84,67 @@ foo: bar }); }); + describe('contains blocks', () => { + const streamTemplate = ` +input: log +paths: +{{#each paths}} + - {{this}} +{{/each}} +exclude_files: [".gz$"] +tags: +{{#each tags}} + - {{this}} +{{/each}} +{{#contains "forwarded" tags}} +publisher_pipeline.disable_host: true +{{/contains}} +processors: + - add_locale: ~ +password: {{password}} +{{#if password}} +hidden_password: {{password}} +{{/if}} + `; + + it('should support when a value is not contained in the array', () => { + const vars = { + paths: { value: ['/usr/local/var/log/nginx/access.log'] }, + password: { type: 'password', value: '' }, + tags: { value: ['foo', 'bar', 'forwarded'] }, + }; + + const output = createStream(vars, streamTemplate); + expect(output).toEqual({ + input: 'log', + paths: ['/usr/local/var/log/nginx/access.log'], + exclude_files: ['.gz$'], + processors: [{ add_locale: null }], + password: '', + 'publisher_pipeline.disable_host': true, + tags: ['foo', 'bar', 'forwarded'], + }); + }); + + it('should support when a value is contained in the array', () => { + const vars = { + paths: { value: ['/usr/local/var/log/nginx/access.log'] }, + password: { type: 'password', value: '' }, + tags: { value: ['foo', 'bar'] }, + }; + + const output = createStream(vars, streamTemplate); + expect(output).toEqual({ + input: 'log', + paths: ['/usr/local/var/log/nginx/access.log'], + exclude_files: ['.gz$'], + processors: [{ add_locale: null }], + password: '', + tags: ['foo', 'bar'], + }); + }); + }); + it('should support optional yaml values at root level', () => { const streamTemplate = ` input: logs diff --git a/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.ts b/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.ts index d697ad0576396..88c54d213554c 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/agent/agent.ts @@ -8,10 +8,12 @@ import Handlebars from 'handlebars'; import { safeLoad, safeDump } from 'js-yaml'; import { PackageConfigConfigRecord } from '../../../../common'; +const handlebars = Handlebars.create(); 
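This hunk moves `createStream` onto an isolated Handlebars environment and, further down in the same hunk, registers a `contains` block helper so a package's stream template can emit lines only when a value is present in an array variable (the behaviour the new `{{#contains "forwarded" tags}}` test above exercises). A self-contained sketch of the same idea, independent of the Ingest Manager code:

```ts
// Stand-alone sketch of the technique in this hunk: an isolated Handlebars
// environment plus a `contains` block helper that renders its body only when
// `item` is present in `list`.
import Handlebars from 'handlebars';

const handlebars = Handlebars.create(); // isolated env: helpers don't leak into the global Handlebars

handlebars.registerHelper('contains', function (
  this: any,
  item: string,
  list: string[],
  options: Handlebars.HelperOptions
) {
  return Array.isArray(list) && list.includes(item) ? options.fn(this) : '';
});

const template = handlebars.compile(
  [
    'tags:',
    '{{#each tags}}',
    '  - {{this}}',
    '{{/each}}',
    '{{#contains "forwarded" tags}}',
    'publisher_pipeline.disable_host: true',
    '{{/contains}}',
  ].join('\n'),
  { noEscape: true }
);

// The extra line is emitted only when "forwarded" is one of the tags.
console.log(template({ tags: ['foo', 'forwarded'] })); // includes publisher_pipeline.disable_host
console.log(template({ tags: ['foo', 'bar'] })); // omits it
```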
+ export function createStream(variables: PackageConfigConfigRecord, streamTemplate: string) { const { vars, yamlValues } = buildTemplateVariables(variables, streamTemplate); - const template = Handlebars.compile(streamTemplate, { noEscape: true }); + const template = handlebars.compile(streamTemplate, { noEscape: true }); let stream = template(vars); stream = replaceRootLevelYamlVariables(yamlValues, stream); @@ -87,6 +89,16 @@ function buildTemplateVariables(variables: PackageConfigConfigRecord, streamTemp return { vars, yamlValues }; } +function containsHelper(this: any, item: string, list: string[], options: any) { + if (Array.isArray(list) && list.includes(item)) { + if (options && options.fn) { + return options.fn(this); + } + } + return ''; +} +handlebars.registerHelper('contains', containsHelper); + function replaceRootLevelYamlVariables(yamlVariables: { [k: string]: any }, yamlTemplate: string) { if (Object.keys(yamlVariables).length === 0 || !yamlTemplate) { return yamlTemplate; diff --git a/x-pack/plugins/ingest_manager/server/types/models/agent.ts b/x-pack/plugins/ingest_manager/server/types/models/agent.ts index 1b396db9b0c88..ea5eacc8e63d0 100644 --- a/x-pack/plugins/ingest_manager/server/types/models/agent.ts +++ b/x-pack/plugins/ingest_manager/server/types/models/agent.ts @@ -22,13 +22,16 @@ const AgentEventBase = { ]), subtype: schema.oneOf([ // State - schema.literal('RUNNING'), - schema.literal('STARTING'), - schema.literal('IN_PROGRESS'), - schema.literal('CONFIG'), - schema.literal('FAILED'), - schema.literal('STOPPING'), - schema.literal('STOPPED'), + schema.oneOf([ + schema.literal('RUNNING'), + schema.literal('STARTING'), + schema.literal('IN_PROGRESS'), + schema.literal('CONFIG'), + schema.literal('FAILED'), + schema.literal('STOPPING'), + schema.literal('STOPPED'), + schema.literal('DEGRADED'), + ]), // Action results schema.literal('DATA_DUMP'), // Actions diff --git a/x-pack/plugins/lens/public/app_plugin/app.tsx b/x-pack/plugins/lens/public/app_plugin/app.tsx index 9b8b9a8531cf0..082a3afcd513e 100644 --- a/x-pack/plugins/lens/public/app_plugin/app.tsx +++ b/x-pack/plugins/lens/public/app_plugin/app.tsx @@ -44,6 +44,7 @@ interface State { isLoading: boolean; isSaveModalVisible: boolean; indexPatternsForTopNav: IndexPatternInstance[]; + originatingApp?: string; persistedDoc?: Document; lastKnownDoc?: Document; @@ -97,6 +98,7 @@ export function App({ fromDate: currentRange.from, toDate: currentRange.to, }, + originatingApp, filters: [], indicateNoData: false, }; @@ -321,9 +323,14 @@ export function App({ .then(({ id }) => { // Prevents unnecessary network request and disables save button const newDoc = { ...doc, id }; + const currentOriginatingApp = state.originatingApp; setState((s) => ({ ...s, isSaveModalVisible: false, + originatingApp: + saveProps.newCopyOnSave && !saveProps.returnToOrigin + ? undefined + : currentOriginatingApp, persistedDoc: newDoc, lastKnownDoc: newDoc, })); @@ -368,7 +375,7 @@ export function App({
{ if (isSaveable && lastKnownDoc) { setState((s) => ({ ...s, isSaveModalVisible: true })); @@ -523,7 +530,7 @@ export function App({
{lastKnownDoc && state.isSaveModalVisible && ( runSave(props)} onClose={() => setState((s) => ({ ...s, isSaveModalVisible: false }))} documentInfo={{ diff --git a/x-pack/plugins/lists/common/constants.ts b/x-pack/plugins/lists/common/constants.ts index 7bb83cddd4331..df16085b53405 100644 --- a/x-pack/plugins/lists/common/constants.ts +++ b/x-pack/plugins/lists/common/constants.ts @@ -44,7 +44,7 @@ export const ENDPOINT_LIST_ITEM_URL = '/api/endpoint_list/items'; export const ENDPOINT_LIST_ID = 'endpoint_list'; /** The name of the single global space agnostic endpoint list */ -export const ENDPOINT_LIST_NAME = 'Elastic Endpoint Exception List'; +export const ENDPOINT_LIST_NAME = 'Elastic Endpoint Security Exception List'; /** The description of the single global space agnostic endpoint list */ -export const ENDPOINT_LIST_DESCRIPTION = 'Elastic Endpoint Exception List'; +export const ENDPOINT_LIST_DESCRIPTION = 'Elastic Endpoint Security Exception List'; diff --git a/x-pack/plugins/lists/public/shared_exports.ts b/x-pack/plugins/lists/public/shared_exports.ts index 56341035f839f..16026a436f154 100644 --- a/x-pack/plugins/lists/public/shared_exports.ts +++ b/x-pack/plugins/lists/public/shared_exports.ts @@ -5,7 +5,9 @@ */ // Exports to be shared with plugins +export { withOptionalSignal } from './common/with_optional_signal'; export { useIsMounted } from './common/hooks/use_is_mounted'; +export { useAsync } from './common/hooks/use_async'; export { useApi } from './exceptions/hooks/use_api'; export { usePersistExceptionItem } from './exceptions/hooks/persist_exception_item'; export { usePersistExceptionList } from './exceptions/hooks/persist_exception_list'; diff --git a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts index b596b831f2d68..2e9bb1325632e 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts @@ -60,7 +60,7 @@ export const createEndpointList = async ({ ); return transformSavedObjectToExceptionList({ savedObject }); } catch (err) { - if (err.status === 409) { + if (savedObjectsClient.errors.isConflictError(err)) { return null; } else { throw err; diff --git a/x-pack/plugins/maps/common/descriptor_types/sources.ts b/x-pack/plugins/maps/common/descriptor_types/sources.ts index 7eda37bf53351..6e8884d942e19 100644 --- a/x-pack/plugins/maps/common/descriptor_types/sources.ts +++ b/x-pack/plugins/maps/common/descriptor_types/sources.ts @@ -168,6 +168,7 @@ export type LayerDescriptor = { __trackedLayerDescriptor?: LayerDescriptor; alpha?: number; id: string; + joins?: JoinDescriptor[]; label?: string | null; areLabelsOnTop?: boolean; minZoom?: number; @@ -180,7 +181,6 @@ export type LayerDescriptor = { }; export type VectorLayerDescriptor = LayerDescriptor & { - joins?: JoinDescriptor[]; style?: VectorStyleDescriptor; }; diff --git a/x-pack/plugins/maps/public/actions/layer_actions.ts b/x-pack/plugins/maps/public/actions/layer_actions.ts index a0d2152e8866c..208f6dc6c6f85 100644 --- a/x-pack/plugins/maps/public/actions/layer_actions.ts +++ b/x-pack/plugins/maps/public/actions/layer_actions.ts @@ -35,12 +35,7 @@ import { UPDATE_LAYER_STYLE, UPDATE_SOURCE_PROP, } from './map_action_constants'; -import { - clearDataRequests, - syncDataForLayerId, - syncDataForLayer, - updateStyleMeta, -} from './data_request_actions'; +import { clearDataRequests, syncDataForLayerId, 
updateStyleMeta } from './data_request_actions'; import { cleanTooltipStateForLayer } from './tooltip_actions'; import { JoinDescriptor, LayerDescriptor, StyleDescriptor } from '../../common/descriptor_types'; import { ILayer } from '../classes/layers/layer'; @@ -175,7 +170,7 @@ export function promotePreviewLayers() { } export function setLayerVisibility(layerId: string, makeVisible: boolean) { - return async (dispatch: Dispatch, getState: () => MapStoreState) => { + return (dispatch: Dispatch, getState: () => MapStoreState) => { // if the current-state is invisible, we also want to sync data // e.g. if a layer was invisible at start-up, it won't have any data loaded const layer = getLayerById(layerId, getState()); @@ -189,19 +184,19 @@ export function setLayerVisibility(layerId: string, makeVisible: boolean) { dispatch(cleanTooltipStateForLayer(layerId)); } - await dispatch({ + dispatch({ type: SET_LAYER_VISIBILITY, layerId, visibility: makeVisible, }); if (makeVisible) { - dispatch(syncDataForLayer(layer)); + dispatch(syncDataForLayerId(layerId)); } }; } export function toggleLayerVisible(layerId: string) { - return async (dispatch: Dispatch, getState: () => MapStoreState) => { + return (dispatch: Dispatch, getState: () => MapStoreState) => { const layer = getLayerById(layerId, getState()); if (!layer) { return; diff --git a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.test.tsx b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.test.tsx new file mode 100644 index 0000000000000..5d234f5be44af --- /dev/null +++ b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.test.tsx @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { SCALING_TYPES, SOURCE_TYPES } from '../../../../common/constants'; +import { BlendedVectorLayer } from './blended_vector_layer'; +// @ts-expect-error +import { ESSearchSource } from '../../sources/es_search_source'; +import { ESGeoGridSourceDescriptor } from '../../../../common/descriptor_types'; + +jest.mock('../../../kibana_services', () => { + return { + getIsDarkMode() { + return false; + }, + }; +}); + +const mapColors: string[] = []; + +const notClusteredDataRequest = { + data: { isSyncClustered: false }, + dataId: 'ACTIVE_COUNT_DATA_ID', +}; + +const clusteredDataRequest = { + data: { isSyncClustered: true }, + dataId: 'ACTIVE_COUNT_DATA_ID', +}; + +const documentSourceDescriptor = ESSearchSource.createDescriptor({ + geoField: 'myGeoField', + indexPatternId: 'myIndexPattern', + scalingType: SCALING_TYPES.CLUSTERS, +}); + +describe('getSource', () => { + describe('isClustered: true', () => { + test('should return cluster source', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource(documentSourceDescriptor), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [clusteredDataRequest], + }, + mapColors + ), + }); + + const source = blendedVectorLayer.getSource(); + expect(source.cloneDescriptor().type).toBe(SOURCE_TYPES.ES_GEO_GRID); + }); + + test('cluster source applyGlobalQuery should be true when document source applyGlobalQuery is true', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource(documentSourceDescriptor), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [clusteredDataRequest], + }, + mapColors + ), + }); + + const source = blendedVectorLayer.getSource(); + expect((source.cloneDescriptor() as ESGeoGridSourceDescriptor).applyGlobalQuery).toBe(true); + }); + + test('cluster source applyGlobalQuery should be false when document source applyGlobalQuery is false', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource({ + ...documentSourceDescriptor, + applyGlobalQuery: false, + }), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [clusteredDataRequest], + }, + mapColors + ), + }); + + const source = blendedVectorLayer.getSource(); + expect((source.cloneDescriptor() as ESGeoGridSourceDescriptor).applyGlobalQuery).toBe(false); + }); + }); + + describe('isClustered: false', () => { + test('should return document source', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource(documentSourceDescriptor), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [notClusteredDataRequest], + }, + mapColors + ), + }); + + const source = blendedVectorLayer.getSource(); + expect(source.cloneDescriptor().type).toBe(SOURCE_TYPES.ES_SEARCH); + }); + }); +}); + +describe('cloneDescriptor', () => { + describe('isClustered: true', () => { + test('Cloned layer descriptor sourceDescriptor should be document source', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource(documentSourceDescriptor), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [clusteredDataRequest], + }, + mapColors + ), + }); + + const 
clonedLayerDescriptor = await blendedVectorLayer.cloneDescriptor(); + expect(clonedLayerDescriptor.sourceDescriptor!.type).toBe(SOURCE_TYPES.ES_SEARCH); + expect(clonedLayerDescriptor.label).toBe('Clone of myIndexPattern'); + }); + }); + + describe('isClustered: false', () => { + test('Cloned layer descriptor sourceDescriptor should be document source', async () => { + const blendedVectorLayer = new BlendedVectorLayer({ + source: new ESSearchSource(documentSourceDescriptor), + layerDescriptor: BlendedVectorLayer.createDescriptor( + { + sourceDescriptor: documentSourceDescriptor, + __dataRequests: [notClusteredDataRequest], + }, + mapColors + ), + }); + + const clonedLayerDescriptor = await blendedVectorLayer.cloneDescriptor(); + expect(clonedLayerDescriptor.sourceDescriptor!.type).toBe(SOURCE_TYPES.ES_SEARCH); + expect(clonedLayerDescriptor.label).toBe('Clone of myIndexPattern'); + }); + }); +}); diff --git a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts index da28574189e6a..950d9890a3c65 100644 --- a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts +++ b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts @@ -34,6 +34,7 @@ import { SizeDynamicOptions, DynamicStylePropertyOptions, StylePropertyOptions, + LayerDescriptor, VectorLayerDescriptor, } from '../../../../common/descriptor_types'; import { IStyle } from '../../styles/style'; @@ -216,7 +217,7 @@ export class BlendedVectorLayer extends VectorLayer implements IVectorLayer { } } - async getDisplayName(source: ISource) { + async getDisplayName(source?: ISource) { const displayName = await super.getDisplayName(source); return this._isClustered ? i18n.translate('xpack.maps.blendedVectorLayer.clusteredLayerName', { @@ -242,6 +243,19 @@ export class BlendedVectorLayer extends VectorLayer implements IVectorLayer { return false; } + async cloneDescriptor(): Promise { + const clonedDescriptor = await super.cloneDescriptor(); + + // Use super getDisplayName instead of instance getDisplayName to avoid getting 'Clustered Clone of Clustered' + const displayName = await super.getDisplayName(); + clonedDescriptor.label = `Clone of ${displayName}`; + + // sourceDescriptor must be document source descriptor + clonedDescriptor.sourceDescriptor = this._documentSource.cloneDescriptor(); + + return clonedDescriptor; + } + getSource() { return this._isClustered ? this._clusterSource : this._documentSource; } diff --git a/x-pack/plugins/maps/public/classes/layers/layer.test.ts b/x-pack/plugins/maps/public/classes/layers/layer.test.ts new file mode 100644 index 0000000000000..f25ecd7106457 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/layers/layer.test.ts @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +/* eslint-disable max-classes-per-file */ + +import { AbstractLayer } from './layer'; +import { ISource } from '../sources/source'; +import { IStyle } from '../styles/style'; +import { AGG_TYPE, FIELD_ORIGIN, LAYER_STYLE_TYPE, VECTOR_STYLES } from '../../../common/constants'; +import { ESTermSourceDescriptor, VectorStyleDescriptor } from '../../../common/descriptor_types'; +import { getDefaultDynamicProperties } from '../styles/vector/vector_style_defaults'; + +jest.mock('uuid/v4', () => { + return function () { + return '12345'; + }; +}); + +class MockLayer extends AbstractLayer {} + +class MockSource { + cloneDescriptor() { + return {}; + } + + getDisplayName() { + return 'mySource'; + } +} + +class MockStyle {} + +describe('cloneDescriptor', () => { + describe('with joins', () => { + const styleDescriptor = { + type: LAYER_STYLE_TYPE.VECTOR, + properties: { + ...getDefaultDynamicProperties(), + }, + } as VectorStyleDescriptor; + // @ts-expect-error + styleDescriptor.properties[VECTOR_STYLES.FILL_COLOR].options.field = { + name: '__kbnjoin__count__557d0f15', + origin: FIELD_ORIGIN.JOIN, + }; + // @ts-expect-error + styleDescriptor.properties[VECTOR_STYLES.LINE_COLOR].options.field = { + name: 'bytes', + origin: FIELD_ORIGIN.SOURCE, + }; + // @ts-expect-error + styleDescriptor.properties[VECTOR_STYLES.LABEL_BORDER_COLOR].options.field = { + name: '__kbnjoin__count__6666666666', + origin: FIELD_ORIGIN.JOIN, + }; + + test('Should update data driven styling properties using join fields', async () => { + const layerDescriptor = AbstractLayer.createDescriptor({ + style: styleDescriptor, + joins: [ + { + leftField: 'iso2', + right: { + id: '557d0f15', + indexPatternId: 'myIndexPattern', + indexPatternTitle: 'logs-*', + metrics: [{ type: AGG_TYPE.COUNT }], + term: 'myTermField', + type: 'joinSource', + }, + }, + ], + }); + const layer = new MockLayer({ + layerDescriptor, + source: (new MockSource() as unknown) as ISource, + style: (new MockStyle() as unknown) as IStyle, + }); + const clonedDescriptor = await layer.cloneDescriptor(); + const clonedStyleProps = (clonedDescriptor.style as VectorStyleDescriptor).properties; + // Should update style field belonging to join + // @ts-expect-error + expect(clonedStyleProps[VECTOR_STYLES.FILL_COLOR].options.field.name).toEqual( + '__kbnjoin__count__12345' + ); + // Should not update style field belonging to source + // @ts-expect-error + expect(clonedStyleProps[VECTOR_STYLES.LINE_COLOR].options.field.name).toEqual('bytes'); + // Should not update style feild belonging to different join + // @ts-expect-error + expect(clonedStyleProps[VECTOR_STYLES.LABEL_BORDER_COLOR].options.field.name).toEqual( + '__kbnjoin__count__6666666666' + ); + }); + + test('Should update data driven styling properties using join fields when metrics are not provided', async () => { + const layerDescriptor = AbstractLayer.createDescriptor({ + style: styleDescriptor, + joins: [ + { + leftField: 'iso2', + right: ({ + id: '557d0f15', + indexPatternId: 'myIndexPattern', + indexPatternTitle: 'logs-*', + term: 'myTermField', + type: 'joinSource', + } as unknown) as ESTermSourceDescriptor, + }, + ], + }); + const layer = new MockLayer({ + layerDescriptor, + source: (new MockSource() as unknown) as ISource, + style: (new MockStyle() as unknown) as IStyle, + }); + const clonedDescriptor = await layer.cloneDescriptor(); + const clonedStyleProps = (clonedDescriptor.style as VectorStyleDescriptor).properties; + // Should update style field belonging to join + // @ts-expect-error + 
expect(clonedStyleProps[VECTOR_STYLES.FILL_COLOR].options.field.name).toEqual( + '__kbnjoin__count__12345' + ); + }); + }); +}); diff --git a/x-pack/plugins/maps/public/classes/layers/layer.tsx b/x-pack/plugins/maps/public/classes/layers/layer.tsx index d8def155a9185..424100c5a7e3a 100644 --- a/x-pack/plugins/maps/public/classes/layers/layer.tsx +++ b/x-pack/plugins/maps/public/classes/layers/layer.tsx @@ -14,16 +14,26 @@ import { i18n } from '@kbn/i18n'; import { FeatureCollection } from 'geojson'; import { DataRequest } from '../util/data_request'; import { + AGG_TYPE, + FIELD_ORIGIN, MAX_ZOOM, MB_SOURCE_ID_LAYER_ID_PREFIX_DELIMITER, MIN_ZOOM, SOURCE_DATA_REQUEST_ID, + STYLE_TYPE, } from '../../../common/constants'; import { copyPersistentState } from '../../reducers/util'; -import { LayerDescriptor, MapExtent, StyleDescriptor } from '../../../common/descriptor_types'; +import { + AggDescriptor, + JoinDescriptor, + LayerDescriptor, + MapExtent, + StyleDescriptor, +} from '../../../common/descriptor_types'; import { Attribution, ImmutableSourceProperty, ISource, SourceEditorArgs } from '../sources/source'; import { DataRequestContext } from '../../actions'; import { IStyle } from '../styles/style'; +import { getJoinAggKey } from '../../../common/get_agg_key'; export interface ILayer { getBounds(dataRequestContext: DataRequestContext): Promise; @@ -157,10 +167,43 @@ export class AbstractLayer implements ILayer { clonedDescriptor.sourceDescriptor = this.getSource().cloneDescriptor(); if (clonedDescriptor.joins) { - // @ts-expect-error - clonedDescriptor.joins.forEach((joinDescriptor) => { + clonedDescriptor.joins.forEach((joinDescriptor: JoinDescriptor) => { + const originalJoinId = joinDescriptor.right.id!; + // right.id is uuid used to track requests in inspector joinDescriptor.right.id = uuid(); + + // Update all data driven styling properties using join fields + if (clonedDescriptor.style && 'properties' in clonedDescriptor.style) { + const metrics = + joinDescriptor.right.metrics && joinDescriptor.right.metrics.length + ? joinDescriptor.right.metrics + : [{ type: AGG_TYPE.COUNT }]; + metrics.forEach((metricsDescriptor: AggDescriptor) => { + const originalJoinKey = getJoinAggKey({ + aggType: metricsDescriptor.type, + aggFieldName: metricsDescriptor.field ? metricsDescriptor.field : '', + rightSourceId: originalJoinId, + }); + const newJoinKey = getJoinAggKey({ + aggType: metricsDescriptor.type, + aggFieldName: metricsDescriptor.field ? 
metricsDescriptor.field : '', + rightSourceId: joinDescriptor.right.id!, + }); + + Object.keys(clonedDescriptor.style.properties).forEach((key) => { + const styleProp = clonedDescriptor.style.properties[key]; + if ( + styleProp.type === STYLE_TYPE.DYNAMIC && + styleProp.options.field && + styleProp.options.field.origin === FIELD_ORIGIN.JOIN && + styleProp.options.field.name === originalJoinKey + ) { + styleProp.options.field.name = newJoinKey; + } + }); + }); + } }); } return clonedDescriptor; diff --git a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/__snapshots__/xyz_tms_editor.test.tsx.snap b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/__snapshots__/xyz_tms_editor.test.tsx.snap new file mode 100644 index 0000000000000..b8ed4a727fad0 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/__snapshots__/xyz_tms_editor.test.tsx.snap @@ -0,0 +1,237 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`attribution validation should provide no validation errors when attribution text and attribution url are provided 1`] = ` + + + + + + + + + + + +`; + +exports[`attribution validation should provide validation error when attribution text is provided without attribution url 1`] = ` + + + + + + + + + + + +`; + +exports[`attribution validation should provide validation error when attribution url is provided without attribution text 1`] = ` + + + + + + + + + + + +`; + +exports[`should render 1`] = ` + + + + + + + + + + + +`; diff --git a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/layer_wizard.tsx b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/layer_wizard.tsx index 48c526855d3a4..b0344a3e0e318 100644 --- a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/layer_wizard.tsx +++ b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/layer_wizard.tsx @@ -19,7 +19,12 @@ export const tmsLayerWizardConfig: LayerWizard = { }), icon: 'grid', renderWizard: ({ previewLayers }: RenderWizardArguments) => { - const onSourceConfigChange = (sourceConfig: XYZTMSSourceConfig) => { + const onSourceConfigChange = (sourceConfig: XYZTMSSourceConfig | null) => { + if (!sourceConfig) { + previewLayers([]); + return; + } + const layerDescriptor = TileLayer.createDescriptor({ sourceDescriptor: XYZTMSSource.createDescriptor(sourceConfig), }); diff --git a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.test.tsx b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.test.tsx new file mode 100644 index 0000000000000..71f78c3e15152 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.test.tsx @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React from 'react'; +import { shallow } from 'enzyme'; +import { XYZTMSEditor } from './xyz_tms_editor'; + +const onSourceConfigChange = () => {}; + +test('should render', () => { + const component = shallow(); + expect(component).toMatchSnapshot(); +}); + +describe('attribution validation', () => { + test('should provide validation error when attribution text is provided without attribution url', () => { + const component = shallow(); + component.setState({ attributionText: 'myAttribtionLabel' }); + expect(component).toMatchSnapshot(); + }); + + test('should provide validation error when attribution url is provided without attribution text', () => { + const component = shallow(); + component.setState({ attributionUrl: 'http://mySource' }); + expect(component).toMatchSnapshot(); + }); + + test('should provide no validation errors when attribution text and attribution url are provided', () => { + const component = shallow(); + component.setState({ attributionText: 'myAttribtionLabel' }); + component.setState({ attributionUrl: 'http://mySource' }); + expect(component).toMatchSnapshot(); + }); +}); diff --git a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.tsx b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.tsx index bf5f2c3dfe04d..5583f637b4471 100644 --- a/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.tsx +++ b/x-pack/plugins/maps/public/classes/sources/xyz_tms_source/xyz_tms_editor.tsx @@ -9,70 +9,56 @@ import React, { Component, ChangeEvent } from 'react'; import _ from 'lodash'; import { EuiFormRow, EuiFieldText, EuiPanel } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { AttributionDescriptor } from '../../../../common/descriptor_types'; -export type XYZTMSSourceConfig = AttributionDescriptor & { +export type XYZTMSSourceConfig = { urlTemplate: string; + attributionText: string; + attributionUrl: string; }; -export interface Props { - onSourceConfigChange: (sourceConfig: XYZTMSSourceConfig) => void; +interface Props { + onSourceConfigChange: (sourceConfig: XYZTMSSourceConfig | null) => void; } interface State { - tmsInput: string; - tmsCanPreview: boolean; + url: string; attributionText: string; attributionUrl: string; } export class XYZTMSEditor extends Component { state = { - tmsInput: '', - tmsCanPreview: false, + url: '', attributionText: '', attributionUrl: '', }; - _sourceConfigChange = _.debounce((updatedSourceConfig: XYZTMSSourceConfig) => { - if (this.state.tmsCanPreview) { - this.props.onSourceConfigChange(updatedSourceConfig); - } - }, 2000); - - _handleTMSInputChange(e: ChangeEvent) { - const url = e.target.value; + _previewLayer = _.debounce(() => { + const { url, attributionText, attributionUrl } = this.state; - const canPreview = + const isUrlValid = url.indexOf('{x}') >= 0 && url.indexOf('{y}') >= 0 && url.indexOf('{z}') >= 0; - this.setState( - { - tmsInput: url, - tmsCanPreview: canPreview, - }, - () => this._sourceConfigChange({ urlTemplate: url }) - ); - } + const sourceConfig = isUrlValid + ? 
{ + urlTemplate: url, + attributionText, + attributionUrl, + } + : null; + this.props.onSourceConfigChange(sourceConfig); + }, 500); - _handleTMSAttributionChange(attributionUpdate: AttributionDescriptor) { - this.setState( - { - attributionUrl: attributionUpdate.attributionUrl || '', - attributionText: attributionUpdate.attributionText || '', - }, - () => { - const { attributionText, attributionUrl, tmsInput } = this.state; + _onUrlChange = (event: ChangeEvent) => { + this.setState({ url: event.target.value }, this._previewLayer); + }; - if (tmsInput && attributionText && attributionUrl) { - this._sourceConfigChange({ - urlTemplate: tmsInput, - attributionText, - attributionUrl, - }); - } - } - ); - } + _onAttributionTextChange = (event: ChangeEvent) => { + this.setState({ attributionText: event.target.value }, this._previewLayer); + }; + + _onAttributionUrlChange = (event: ChangeEvent) => { + this.setState({ attributionUrl: event.target.value }, this._previewLayer); + }; render() { const { attributionText, attributionUrl } = this.state; @@ -81,11 +67,13 @@ export class XYZTMSEditor extends Component { this._handleTMSInputChange(e)} + onChange={this._onUrlChange} /> { }), ]} > - ) => - this._handleTMSAttributionChange({ attributionText: target.value }) - } - /> + { }), ]} > - ) => - this._handleTMSAttributionChange({ attributionUrl: target.value }) - } - /> + ); diff --git a/x-pack/plugins/ml/common/types/capabilities.ts b/x-pack/plugins/ml/common/types/capabilities.ts index f2177b0a3572f..504cd28b8fa14 100644 --- a/x-pack/plugins/ml/common/types/capabilities.ts +++ b/x-pack/plugins/ml/common/types/capabilities.ts @@ -72,6 +72,7 @@ export function getPluginPrivileges() { const adminMlCapabilitiesKeys = Object.keys(adminMlCapabilities); const allMlCapabilitiesKeys = [...adminMlCapabilitiesKeys, ...userMlCapabilitiesKeys]; // TODO: include ML in base privileges for the `8.0` release: https://github.com/elastic/kibana/issues/71422 + const savedObjects = ['index-pattern', 'dashboard', 'search', 'visualization']; const privilege = { app: [PLUGIN_ID, 'kibana'], excludeFromBasePrivileges: true, @@ -79,10 +80,6 @@ export function getPluginPrivileges() { insightsAndAlerting: ['jobsListLink'], }, catalogue: [PLUGIN_ID], - savedObject: { - all: [], - read: ['index-pattern', 'dashboard', 'search', 'visualization'], - }, }; return { @@ -90,11 +87,19 @@ export function getPluginPrivileges() { ...privilege, api: allMlCapabilitiesKeys.map((k) => `ml:${k}`), ui: allMlCapabilitiesKeys, + savedObject: { + all: savedObjects, + read: savedObjects, + }, }, user: { ...privilege, api: userMlCapabilitiesKeys.map((k) => `ml:${k}`), ui: userMlCapabilitiesKeys, + savedObject: { + all: [], + read: savedObjects, + }, }, }; } diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/bottom_bar/bottom_bar.tsx b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/bottom_bar/bottom_bar.tsx index e28386093abe0..8b6c16a71651a 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/bottom_bar/bottom_bar.tsx +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/bottom_bar/bottom_bar.tsx @@ -39,7 +39,7 @@ export const BottomBar: FC = ({ mode, onChangeMode, onCancel, di disableImport ? 
( ) : null } diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js index 64d2e26f827f8..36b77a5a25e09 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js @@ -18,6 +18,7 @@ import { } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; +import { debounce } from 'lodash'; import { importerFactory } from './importer'; import { ResultsLinks } from '../results_links'; import { FilebeatConfigFlyout } from '../filebeat_config_flyout'; @@ -66,6 +67,7 @@ const DEFAULT_STATE = { indexPatternNameError: '', timeFieldName: undefined, isFilebeatFlyoutVisible: false, + checkingValidIndex: false, }; export class ImportView extends Component { @@ -76,14 +78,12 @@ export class ImportView extends Component { } componentDidMount() { - this.loadIndexNames(); this.loadIndexPatternNames(); } clickReset = () => { const state = getDefaultState(this.state, this.props.results); this.setState(state, () => { - this.loadIndexNames(); this.loadIndexPatternNames(); }); }; @@ -326,21 +326,33 @@ export class ImportView extends Component { }; onIndexChange = (e) => { - const name = e.target.value; - const { indexNames, indexPattern, indexPatternNames } = this.state; - + const index = e.target.value; this.setState({ - index: name, - indexNameError: isIndexNameValid(name, indexNames), - // if index pattern has been altered, check that it still matches the inputted index - ...(indexPattern === '' - ? {} - : { - indexPatternNameError: isIndexPatternNameValid(indexPattern, indexPatternNames, name), - }), + index, + checkingValidIndex: true, }); + this.debounceIndexCheck(index); }; + debounceIndexCheck = debounce(async (index) => { + if (index === '') { + this.setState({ checkingValidIndex: false }); + return; + } + + const { exists } = await ml.checkIndexExists({ index }); + const indexNameError = exists ? 
( + + ) : ( + isIndexNameValid(index) + ); + + this.setState({ checkingValidIndex: false, indexNameError }); + }, 500); + onIndexPatternChange = (e) => { const name = e.target.value; const { indexPatternNames, index } = this.state; @@ -396,12 +408,6 @@ export class ImportView extends Component { this.props.showBottomBar(); }; - async loadIndexNames() { - const indices = await ml.getIndices(); - const indexNames = indices.map((i) => i.name); - this.setState({ indexNames }); - } - async loadIndexPatternNames() { await loadIndexPatterns(this.props.indexPatterns); const indexPatternNames = getIndexPatternNames(); @@ -437,6 +443,7 @@ export class ImportView extends Component { indexPatternNameError, timeFieldName, isFilebeatFlyoutVisible, + checkingValidIndex, } = this.state; const createPipeline = pipelineString !== ''; @@ -459,7 +466,8 @@ export class ImportView extends Component { index === '' || indexNameError !== '' || (createIndexPattern === true && indexPatternNameError !== '') || - initialized === true; + initialized === true || + checkingValidIndex === true; return ( @@ -655,16 +663,7 @@ function getDefaultState(state, results) { }; } -function isIndexNameValid(name, indexNames) { - if (indexNames.find((i) => i === name)) { - return ( - - ); - } - +function isIndexNameValid(name) { const reg = new RegExp('[\\\\/*?"<>|\\s,#]+'); if ( name !== name.toLowerCase() || // name should be lowercase diff --git a/x-pack/plugins/ml/public/application/jobs/components/custom_url_editor/utils.js b/x-pack/plugins/ml/public/application/jobs/components/custom_url_editor/utils.js index 0b33efa3f9ff1..87c2219f4d441 100644 --- a/x-pack/plugins/ml/public/application/jobs/components/custom_url_editor/utils.js +++ b/x-pack/plugins/ml/public/application/jobs/components/custom_url_editor/utils.js @@ -11,7 +11,6 @@ import url from 'url'; import { DASHBOARD_APP_URL_GENERATOR } from '../../../../../../../../src/plugins/dashboard/public'; -import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns'; import { getPartitioningFieldNames } from '../../../../../common/util/job_utils'; import { parseInterval } from '../../../../../common/util/parse_interval'; import { replaceTokensInUrlValue, isValidLabel } from '../../../util/custom_url_utils'; @@ -295,11 +294,11 @@ export function getTestUrl(job, customUrl) { }; return new Promise((resolve, reject) => { - ml.esSearch({ - index: ML_RESULTS_INDEX_PATTERN, - rest_total_hits_as_int: true, - body, - }) + ml.results + .anomalySearch({ + rest_total_hits_as_int: true, + body, + }) .then((resp) => { if (resp.hits.total > 0) { const record = resp.hits.hits[0]._source; diff --git a/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list_view/jobs_list_view.js b/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list_view/jobs_list_view.js index a3b6cb39815a3..e9f3cb0d7d70d 100644 --- a/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list_view/jobs_list_view.js +++ b/x-pack/plugins/ml/public/application/jobs/jobs_list/components/jobs_list_view/jobs_list_view.js @@ -63,9 +63,14 @@ export class JobsListView extends Component { this.showDeleteJobModal = () => {}; this.showStartDatafeedModal = () => {}; this.showCreateWatchFlyout = () => {}; + // work around to keep track of whether the component is mounted + // used to block timeouts for results polling + // which can run after unmounting + this._isMounted = false; } componentDidMount() { + this._isMounted = true; this.refreshJobSummaryList(true); if 
(this.props.isManagementTable !== true) { @@ -87,6 +92,7 @@ export class JobsListView extends Component { if (this.props.isManagementTable === undefined) { deletingJobsRefreshTimeout = null; } + this._isMounted = false; } openAutoStartDatafeedModal() { @@ -232,7 +238,7 @@ export class JobsListView extends Component { }; async refreshJobSummaryList(forceRefresh = false) { - if (forceRefresh === true || this.props.blockRefresh !== true) { + if (this._isMounted && (forceRefresh === true || this.props.blockRefresh !== true)) { // Set loading to true for jobs_list table for initial job loading if (this.state.loading === null) { this.setState({ loading: true }); @@ -283,6 +289,10 @@ export class JobsListView extends Component { } async checkDeletingJobTasks(forceRefresh = false) { + if (this._isMounted === false) { + return; + } + const { jobIds: taskJobIds } = await ml.jobs.deletingJobTasks(); const taskListHasChanged = diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/results_loader/searches.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/results_loader/searches.ts index 724a6146854af..51c396518c851 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/results_loader/searches.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/results_loader/searches.ts @@ -6,7 +6,6 @@ import { get } from 'lodash'; -import { ML_RESULTS_INDEX_PATTERN } from '../../../../../../common/constants/index_patterns'; import { escapeForElasticsearchQuery } from '../../../../util/string_utils'; import { ml } from '../../../../services/ml_api_service'; @@ -53,69 +52,70 @@ export function getScoresByRecord( jobIdFilterStr += `"${String(firstSplitField.value).replace(/\\/g, '\\\\')}"`; } - ml.esSearch({ - index: ML_RESULTS_INDEX_PATTERN, - size: 0, - body: { - query: { - bool: { - filter: [ - { - query_string: { - query: 'result_type:record', + ml.results + .anomalySearch({ + size: 0, + body: { + query: { + bool: { + filter: [ + { + query_string: { + query: 'result_type:record', + }, }, - }, - { - bool: { - must: [ - { - range: { - timestamp: { - gte: earliestMs, - lte: latestMs, - format: 'epoch_millis', + { + bool: { + must: [ + { + range: { + timestamp: { + gte: earliestMs, + lte: latestMs, + format: 'epoch_millis', + }, }, }, - }, - { - query_string: { - query: jobIdFilterStr, + { + query_string: { + query: jobIdFilterStr, + }, }, - }, - ], + ], + }, }, - }, - ], - }, - }, - aggs: { - detector_index: { - terms: { - field: 'detector_index', - order: { - recordScore: 'desc', - }, + ], }, - aggs: { - recordScore: { - max: { - field: 'record_score', + }, + aggs: { + detector_index: { + terms: { + field: 'detector_index', + order: { + recordScore: 'desc', }, }, - byTime: { - date_histogram: { - field: 'timestamp', - interval, - min_doc_count: 1, - extended_bounds: { - min: earliestMs, - max: latestMs, + aggs: { + recordScore: { + max: { + field: 'record_score', }, }, - aggs: { - recordScore: { - max: { - field: 'record_score', + byTime: { + date_histogram: { + field: 'timestamp', + interval, + min_doc_count: 1, + extended_bounds: { + min: earliestMs, + max: latestMs, + }, + }, + aggs: { + recordScore: { + max: { + field: 'record_score', + }, }, }, }, @@ -123,8 +123,7 @@ export function getScoresByRecord( }, }, }, - }, - }) + }) .then((resp: any) => { const detectorsByIndex = get(resp, ['aggregations', 'detector_index', 'buckets'], []); detectorsByIndex.forEach((dtr: any) => { diff --git 
a/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/job_settings_form.tsx b/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/job_settings_form.tsx index 63dec536ea487..e31c6bc7b59e0 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/job_settings_form.tsx +++ b/x-pack/plugins/ml/public/application/jobs/new_job/recognize/components/job_settings_form.tsx @@ -258,7 +258,7 @@ export const JobSettingsForm: FC = ({ fill type="submit" isLoading={saveState === SAVE_STATE.SAVING} - disabled={!validationResult.formValid} + disabled={!validationResult.formValid || saveState === SAVE_STATE.SAVING} onClick={() => { onSubmit(formState); }} @@ -266,19 +266,11 @@ export const JobSettingsForm: FC = ({ defaultMessage: 'Create job', })} > - {saveState === SAVE_STATE.NOT_SAVED && ( - - )} - {saveState === SAVE_STATE.SAVING && ( - - )} + diff --git a/x-pack/plugins/ml/public/application/services/forecast_service.js b/x-pack/plugins/ml/public/application/services/forecast_service.js index c3d593c3347df..ed5a29ff74a63 100644 --- a/x-pack/plugins/ml/public/application/services/forecast_service.js +++ b/x-pack/plugins/ml/public/application/services/forecast_service.js @@ -9,7 +9,6 @@ import _ from 'lodash'; import { map } from 'rxjs/operators'; -import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; import { ml } from './ml_api_service'; // Gets a basic summary of the most recently run forecasts for the specified @@ -48,19 +47,19 @@ function getForecastsSummary(job, query, earliestMs, maxResults) { filterCriteria.push(query); } - ml.esSearch({ - index: ML_RESULTS_INDEX_PATTERN, - size: maxResults, - rest_total_hits_as_int: true, - body: { - query: { - bool: { - filter: filterCriteria, + ml.results + .anomalySearch({ + size: maxResults, + rest_total_hits_as_int: true, + body: { + query: { + bool: { + filter: filterCriteria, + }, }, + sort: [{ forecast_create_timestamp: { order: 'desc' } }], }, - sort: [{ forecast_create_timestamp: { order: 'desc' } }], - }, - }) + }) .then((resp) => { if (resp.hits.total !== 0) { obj.forecasts = resp.hits.hits.map((hit) => hit._source); @@ -106,29 +105,29 @@ function getForecastDateRange(job, forecastId) { // TODO - add in criteria for detector index and entity fields (by, over, partition) // once forecasting with these parameters is supported. 
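The recurring pattern in these client-side services is the same: drop the explicit `index: ML_RESULTS_INDEX_PATTERN` argument and replace the raw `ml.esSearch` / `ml.esSearch$` call with the new `ml.results.anomalySearch` / `anomalySearch$` wrapper, so the results index (and the credentials used to query it) is resolved on the Kibana server rather than in the browser. A minimal sketch of the call-shape change, with `filterCriteria` and `handleResponse` standing in for the surrounding code:

```ts
// Before: the browser named the ML results index and searched it directly.
ml.esSearch({
  index: ML_RESULTS_INDEX_PATTERN,
  size: 0,
  body: { query: { bool: { filter: filterCriteria } } },
}).then(handleResponse);

// After: no index in the request; the /api/ml/results/anomaly_search route
// injects ML_RESULTS_INDEX_PATTERN server-side and runs the search as the
// internal user.
ml.results
  .anomalySearch({
    size: 0,
    body: { query: { bool: { filter: filterCriteria } } },
  })
  .then(handleResponse);
```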
- ml.esSearch({ - index: ML_RESULTS_INDEX_PATTERN, - size: 0, - body: { - query: { - bool: { - filter: filterCriteria, - }, - }, - aggs: { - earliest: { - min: { - field: 'timestamp', + ml.results + .anomalySearch({ + size: 0, + body: { + query: { + bool: { + filter: filterCriteria, }, }, - latest: { - max: { - field: 'timestamp', + aggs: { + earliest: { + min: { + field: 'timestamp', + }, + }, + latest: { + max: { + field: 'timestamp', + }, }, }, }, - }, - }) + }) .then((resp) => { obj.earliest = _.get(resp, 'aggregations.earliest.value', null); obj.latest = _.get(resp, 'aggregations.latest.value', null); @@ -243,9 +242,8 @@ function getForecastData( min: aggType.min, }; - return ml - .esSearch$({ - index: ML_RESULTS_INDEX_PATTERN, + return ml.results + .anomalySearch$({ size: 0, body: { query: { @@ -343,18 +341,18 @@ function getForecastRequestStats(job, forecastId) { }, ]; - ml.esSearch({ - index: ML_RESULTS_INDEX_PATTERN, - size: 1, - rest_total_hits_as_int: true, - body: { - query: { - bool: { - filter: filterCriteria, + ml.results + .anomalySearch({ + size: 1, + rest_total_hits_as_int: true, + body: { + query: { + bool: { + filter: filterCriteria, + }, }, }, - }, - }) + }) .then((resp) => { if (resp.hits.total !== 0) { obj.stats = _.first(resp.hits.hits)._source; diff --git a/x-pack/plugins/ml/public/application/services/ml_api_service/results.ts b/x-pack/plugins/ml/public/application/services/ml_api_service/results.ts index 521fd306847eb..08c3853ace6f8 100644 --- a/x-pack/plugins/ml/public/application/services/ml_api_service/results.ts +++ b/x-pack/plugins/ml/public/application/services/ml_api_service/results.ts @@ -96,4 +96,22 @@ export const resultsApiProvider = (httpService: HttpService) => ({ body, }); }, + + anomalySearch(obj: any) { + const body = JSON.stringify(obj); + return httpService.http({ + path: `${basePath()}/results/anomaly_search`, + method: 'POST', + body, + }); + }, + + anomalySearch$(obj: any) { + const body = JSON.stringify(obj); + return httpService.http$({ + path: `${basePath()}/results/anomaly_search`, + method: 'POST', + body, + }); + }, }); diff --git a/x-pack/plugins/ml/public/application/services/results_service/result_service_rx.ts b/x-pack/plugins/ml/public/application/services/results_service/result_service_rx.ts index 1bcbd8dbcdd63..d7f016b419377 100644 --- a/x-pack/plugins/ml/public/application/services/results_service/result_service_rx.ts +++ b/x-pack/plugins/ml/public/application/services/results_service/result_service_rx.ts @@ -262,8 +262,8 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) { }, ]; - return mlApiServices - .esSearch$({ + return mlApiServices.results + .anomalySearch$({ index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -399,8 +399,8 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) { }); }); - return mlApiServices - .esSearch$({ + return mlApiServices.results + .anomalySearch$({ index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: maxResults !== undefined ? 
maxResults : 100, @@ -484,8 +484,8 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) { }); } - return mlApiServices - .esSearch$({ + return mlApiServices.results + .anomalySearch$({ index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { diff --git a/x-pack/plugins/ml/public/application/services/results_service/results_service.js b/x-pack/plugins/ml/public/application/services/results_service/results_service.js index 55ddb1de3529e..50e2d0a5a2a0b 100644 --- a/x-pack/plugins/ml/public/application/services/results_service/results_service.js +++ b/x-pack/plugins/ml/public/application/services/results_service/results_service.js @@ -8,7 +8,6 @@ import _ from 'lodash'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; import { escapeForElasticsearchQuery } from '../../util/string_utils'; -import { ML_RESULTS_INDEX_PATTERN } from '../../../../common/constants/index_patterns'; import { ANOMALY_SWIM_LANE_HARD_LIMIT, SWIM_LANE_DEFAULT_PAGE_SIZE, @@ -66,9 +65,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: 0, body: { query: { @@ -238,9 +236,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: 0, body: { query: { @@ -378,9 +375,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: 0, body: { query: { @@ -560,9 +556,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: 0, body: { query: { @@ -721,9 +716,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: maxResults !== undefined ? maxResults : 100, rest_total_hits_as_int: true, body: { @@ -854,9 +848,8 @@ export function resultsServiceProvider(mlApiServices) { }); } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: maxResults !== undefined ? maxResults : 100, rest_total_hits_as_int: true, body: { @@ -980,9 +973,8 @@ export function resultsServiceProvider(mlApiServices) { } } - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: maxResults !== undefined ? 
maxResults : 100, rest_total_hits_as_int: true, body: { @@ -1307,9 +1299,8 @@ export function resultsServiceProvider(mlApiServices) { }); }); - mlApiServices - .esSearch({ - index: ML_RESULTS_INDEX_PATTERN, + mlApiServices.results + .anomalySearch({ size: 0, body: { query: { diff --git a/x-pack/plugins/ml/server/lib/check_annotations/index.ts b/x-pack/plugins/ml/server/lib/check_annotations/index.ts index fb37917c512cb..de19f0ead6791 100644 --- a/x-pack/plugins/ml/server/lib/check_annotations/index.ts +++ b/x-pack/plugins/ml/server/lib/check_annotations/index.ts @@ -18,17 +18,17 @@ import { // - ML_ANNOTATIONS_INDEX_ALIAS_READ alias is present // - ML_ANNOTATIONS_INDEX_ALIAS_WRITE alias is present export async function isAnnotationsFeatureAvailable({ - callAsCurrentUser, + callAsInternalUser, }: ILegacyScopedClusterClient) { try { const indexParams = { index: ML_ANNOTATIONS_INDEX_PATTERN }; - const annotationsIndexExists = await callAsCurrentUser('indices.exists', indexParams); + const annotationsIndexExists = await callAsInternalUser('indices.exists', indexParams); if (!annotationsIndexExists) { return false; } - const annotationsReadAliasExists = await callAsCurrentUser('indices.existsAlias', { + const annotationsReadAliasExists = await callAsInternalUser('indices.existsAlias', { index: ML_ANNOTATIONS_INDEX_ALIAS_READ, name: ML_ANNOTATIONS_INDEX_ALIAS_READ, }); @@ -37,7 +37,7 @@ export async function isAnnotationsFeatureAvailable({ return false; } - const annotationsWriteAliasExists = await callAsCurrentUser('indices.existsAlias', { + const annotationsWriteAliasExists = await callAsInternalUser('indices.existsAlias', { index: ML_ANNOTATIONS_INDEX_ALIAS_WRITE, name: ML_ANNOTATIONS_INDEX_ALIAS_WRITE, }); diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts index 3bf9bd0232a5d..5be443266ffe1 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts @@ -52,8 +52,8 @@ describe('annotation_service', () => { const response = await deleteAnnotation(annotationMockId); - expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('delete'); - expect(mockFunct.callAsCurrentUser.mock.calls[0][1]).toEqual(deleteParamsMock); + expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('delete'); + expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(deleteParamsMock); expect(response).toBe(acknowledgedResponseMock); done(); }); @@ -73,8 +73,8 @@ describe('annotation_service', () => { const response: GetResponse = await getAnnotations(indexAnnotationArgsMock); - expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('search'); - expect(mockFunct.callAsCurrentUser.mock.calls[0][1]).toEqual(getAnnotationsRequestMock); + expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('search'); + expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(getAnnotationsRequestMock); expect(Object.keys(response.annotations)).toHaveLength(1); expect(response.annotations[jobIdMock]).toHaveLength(2); expect(isAnnotations(response.annotations[jobIdMock])).toBeTruthy(); @@ -89,7 +89,7 @@ describe('annotation_service', () => { }; const mlClusterClientSpyError: any = { - callAsCurrentUser: jest.fn(() => { + callAsInternalUser: jest.fn(() => { return Promise.resolve(mockEsError); }), }; @@ -124,10 +124,10 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, 
usernameMock); - expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('index'); + expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index'); // test if the annotation has been correctly augmented - const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[0][1]; + const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[0][1]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -154,10 +154,10 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, usernameMock); - expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('index'); + expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index'); // test if the annotation has been correctly augmented - const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[0][1]; + const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[0][1]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -196,9 +196,9 @@ describe('annotation_service', () => { await indexAnnotation(annotation, modifiedUsernameMock); - expect(mockFunct.callAsCurrentUser.mock.calls[1][0]).toBe('index'); + expect(mockFunct.callAsInternalUser.mock.calls[1][0]).toBe('index'); // test if the annotation has been correctly updated - const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[1][1]; + const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[1][1]; const modifiedAnnotation = indexParamsCheck.body; expect(modifiedAnnotation.annotation).toBe(modifiedAnnotationText); expect(modifiedAnnotation.create_username).toBe(originalUsernameMock); diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts index f7353034b7453..8094689abf3e5 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts @@ -76,7 +76,7 @@ export interface DeleteParams { id: string; } -export function annotationProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { async function indexAnnotation(annotation: Annotation, username: string) { if (isAnnotation(annotation) === false) { // No need to translate, this will not be exposed in the UI. @@ -103,7 +103,7 @@ export function annotationProvider({ callAsCurrentUser }: ILegacyScopedClusterCl delete params.body.key; } - return await callAsCurrentUser('index', params); + return await callAsInternalUser('index', params); } async function getAnnotations({ @@ -286,7 +286,7 @@ export function annotationProvider({ callAsCurrentUser }: ILegacyScopedClusterCl }; try { - const resp = await callAsCurrentUser('search', params); + const resp = await callAsInternalUser('search', params); if (resp.error !== undefined && resp.message !== undefined) { // No need to translate, this will not be exposed in the UI. 
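With the annotation provider switched from `callAsCurrentUser` to `callAsInternalUser`, its `index`, `search`, and `delete` calls against the annotations system index run with internal privileges, and the Jest mocks are updated to spy on that method instead. A rough sketch of a test double under this change, assuming the provider exposes `indexAnnotation` and that `annotationMock` is a valid `Annotation` fixture (the names here are illustrative, not copied from the real test file):

```ts
import { annotationProvider } from './annotation';

it('indexes annotations via the internal user', async () => {
  // Only the method the provider actually calls needs to exist on the
  // mocked legacy scoped cluster client.
  const mlClusterClientMock = {
    callAsInternalUser: jest.fn(async (endpoint: string) => {
      if (endpoint === 'index') return { result: 'created' };
      if (endpoint === 'delete') return { result: 'deleted' };
      return { hits: { total: 0, hits: [] } }; // 'search'
    }),
  } as any;

  const { indexAnnotation } = annotationProvider(mlClusterClientMock);
  await indexAnnotation(annotationMock, 'some-user');

  expect(mlClusterClientMock.callAsInternalUser.mock.calls[0][0]).toBe('index');
});
```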
@@ -335,7 +335,7 @@ export function annotationProvider({ callAsCurrentUser }: ILegacyScopedClusterCl refresh: 'wait_for', }; - return await callAsCurrentUser('delete', param); + return await callAsInternalUser('delete', param); } return { diff --git a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts index c8471b5462205..1cb0656e88a0b 100644 --- a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts +++ b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts @@ -23,7 +23,7 @@ interface BoolQuery { bool: { [key: string]: any }; } -export function analyticsAuditMessagesProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function analyticsAuditMessagesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { // search for audit messages, // analyticsId is optional. without it, all analytics will be listed. async function getAnalyticsAuditMessages(analyticsId: string) { @@ -69,7 +69,7 @@ export function analyticsAuditMessagesProvider({ callAsCurrentUser }: ILegacySco } try { - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_NOTIFICATION_INDEX_PATTERN, ignore_unavailable: true, rest_total_hits_as_int: true, diff --git a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js index dcbabd879b47a..86d80c394137f 100644 --- a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js +++ b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js @@ -34,7 +34,7 @@ const anomalyDetectorTypeFilter = { }, }; -export function jobAuditMessagesProvider({ callAsCurrentUser, callAsInternalUser }) { +export function jobAuditMessagesProvider({ callAsInternalUser }) { // search for audit messages, // jobId is optional. without it, all jobs will be listed. // from is optional and should be a string formatted in ES time units. e.g. 
12h, 1d, 7d @@ -100,7 +100,7 @@ export function jobAuditMessagesProvider({ callAsCurrentUser, callAsInternalUser } try { - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_NOTIFICATION_INDEX_PATTERN, ignore_unavailable: true, rest_total_hits_as_int: true, @@ -155,7 +155,7 @@ export function jobAuditMessagesProvider({ callAsCurrentUser, callAsInternalUser levelsPerJobAggSize = jobIds.length; } - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_NOTIFICATION_INDEX_PATTERN, ignore_unavailable: true, rest_total_hits_as_int: true, diff --git a/x-pack/plugins/ml/server/models/job_service/jobs.ts b/x-pack/plugins/ml/server/models/job_service/jobs.ts index e9ed2d0941d96..0aa1cfdae13c7 100644 --- a/x-pack/plugins/ml/server/models/job_service/jobs.ts +++ b/x-pack/plugins/ml/server/models/job_service/jobs.ts @@ -48,7 +48,7 @@ interface Results { } export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; + const { callAsInternalUser } = mlClusterClient; const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient); const { getAuditMessagesSummary } = jobAuditMessagesProvider(mlClusterClient); @@ -400,7 +400,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { const detailed = true; const jobIds = []; try { - const tasksList = await callAsCurrentUser('tasks.list', { actions, detailed }); + const tasksList = await callAsInternalUser('tasks.list', { actions, detailed }); Object.keys(tasksList.nodes).forEach((nodeId) => { const tasks = tasksList.nodes[nodeId].tasks; Object.keys(tasks).forEach((taskId) => { diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts index 4f97238a4a0b5..5ade86806f383 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts @@ -9,9 +9,9 @@ import { ILegacyScopedClusterClient } from 'kibana/server'; import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns'; import { CategoryId, Category } from '../../../../../common/types/categories'; -export function topCategoriesProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { +export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { async function getTotalCategories(jobId: string): Promise<{ total: number }> { - const totalResp = await callAsCurrentUser('search', { + const totalResp = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -37,7 +37,7 @@ export function topCategoriesProvider({ callAsCurrentUser }: ILegacyScopedCluste } async function getTopCategoryCounts(jobId: string, numberOfCategories: number) { - const top: SearchResponse = await callAsCurrentUser('search', { + const top: SearchResponse = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -99,7 +99,7 @@ export function topCategoriesProvider({ callAsCurrentUser }: ILegacyScopedCluste field: 'category_id', }, }; - const result: SearchResponse = await callAsCurrentUser('search', { + const result: SearchResponse = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, size, body: { diff --git 
a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts index 663ee846571e7..9c0efe259844c 100644 --- a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts +++ b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts @@ -75,7 +75,6 @@ function getFieldObject(fieldType: PartitionFieldsType, aggs: any) { } export const getPartitionFieldsValuesFactory = ({ - callAsCurrentUser, callAsInternalUser, }: ILegacyScopedClusterClient) => /** @@ -102,7 +101,7 @@ export const getPartitionFieldsValuesFactory = ({ const isModelPlotEnabled = job?.model_plot_config?.enabled; - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { diff --git a/x-pack/plugins/ml/server/models/results_service/results_service.ts b/x-pack/plugins/ml/server/models/results_service/results_service.ts index 8e904143263d7..04997e517bba9 100644 --- a/x-pack/plugins/ml/server/models/results_service/results_service.ts +++ b/x-pack/plugins/ml/server/models/results_service/results_service.ts @@ -31,7 +31,7 @@ interface Influencer { } export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { - const { callAsCurrentUser } = mlClusterClient; + const { callAsInternalUser } = mlClusterClient; // Obtains data for the anomalies table, aggregating anomalies by day or hour as requested. // Return an Object with properties 'anomalies' and 'interval' (interval used to aggregate anomalies, // one of day, hour or second. Note 'auto' can be provided as the aggregationInterval in the request, @@ -134,7 +134,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }); } - const resp: SearchResponse = await callAsCurrentUser('search', { + const resp: SearchResponse = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: maxRecords, @@ -288,7 +288,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie }, }; - const resp = await callAsCurrentUser('search', query); + const resp = await callAsInternalUser('search', query); const maxScore = _.get(resp, ['aggregations', 'max_score', 'value'], null); return { maxScore }; @@ -326,7 +326,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // Size of job terms agg, consistent with maximum number of jobs supported by Java endpoints. const maxJobs = 10000; - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -370,7 +370,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // from the given index and job ID. // Returned response consists of a list of examples against category ID. async function getCategoryExamples(jobId: string, categoryIds: any, maxExamples: number) { - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: ANOMALIES_TABLE_DEFAULT_QUERY_SIZE, // Matches size of records in anomaly summary table. 
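These server-side searches pair with the new `/api/ml/results/anomaly_search` route registered a little further down in `x-pack/plugins/ml/server/routes/results_service.ts`: the browser sends only the search parameters, and the route pins `index` to `ML_RESULTS_INDEX_PATTERN` before issuing the search as the internal user. A hedged sketch of calling it with core's `HttpSetup` (the query itself is only an illustration):

```ts
// POST /api/ml/results/anomaly_search
// The route spreads this payload and adds `index: ML_RESULTS_INDEX_PATTERN`
// itself, so callers never reference the results index directly.
const resp = await http.post('/api/ml/results/anomaly_search', {
  body: JSON.stringify({
    size: 0,
    body: {
      query: { term: { job_id: 'my_job' } },
      aggs: { max_record_score: { max: { field: 'record_score' } } },
    },
  }),
});
```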
@@ -405,7 +405,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie // Returned response contains four properties - categoryId, regex, examples // and terms (space delimited String of the common tokens matched in values of the category). async function getCategoryDefinition(jobId: string, categoryId: string) { - const resp = await callAsCurrentUser('search', { + const resp = await callAsInternalUser('search', { index: ML_RESULTS_INDEX_PATTERN, rest_total_hits_as_int: true, size: 1, diff --git a/x-pack/plugins/ml/server/routes/apidoc.json b/x-pack/plugins/ml/server/routes/apidoc.json index 98f7a78537c5c..f360da5df5392 100644 --- a/x-pack/plugins/ml/server/routes/apidoc.json +++ b/x-pack/plugins/ml/server/routes/apidoc.json @@ -48,6 +48,7 @@ "GetMaxAnomalyScore", "GetCategoryExamples", "GetPartitionFieldsValues", + "AnomalySearch", "Modules", "DataRecognizer", diff --git a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts index 3e6c6f5f6a2f8..94feb21a6b5fb 100644 --- a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts +++ b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts @@ -513,7 +513,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.updateDataFrameAnalytics', { body: request.body, diff --git a/x-pack/plugins/ml/server/routes/results_service.ts b/x-pack/plugins/ml/server/routes/results_service.ts index c7fcebd2a29a5..c9370362816fa 100644 --- a/x-pack/plugins/ml/server/routes/results_service.ts +++ b/x-pack/plugins/ml/server/routes/results_service.ts @@ -5,6 +5,7 @@ */ import { RequestHandlerContext } from 'kibana/server'; +import { schema } from '@kbn/config-schema'; import { wrapError } from '../client/error_wrapper'; import { RouteInitialization } from '../types'; import { @@ -15,6 +16,7 @@ import { partitionFieldValuesSchema, } from './schemas/results_service_schema'; import { resultsServiceProvider } from '../models/results_service'; +import { ML_RESULTS_INDEX_PATTERN } from '../../common/constants/index_patterns'; function getAnomaliesTableData(context: RequestHandlerContext, payload: any) { const rs = resultsServiceProvider(context.ml!.mlClient); @@ -232,4 +234,35 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) } }) ); + + /** + * @apiGroup ResultsService + * + * @api {post} /api/ml/results/anomaly_search Performs a search on the anomaly results index + * @apiName AnomalySearch + */ + router.post( + { + path: '/api/ml/results/anomaly_search', + validate: { + body: schema.maybe(schema.any()), + }, + options: { + tags: ['access:ml:canGetJobs'], + }, + }, + mlLicense.fullLicenseAPIGuard(async (context, request, response) => { + const body = { + ...request.body, + index: ML_RESULTS_INDEX_PATTERN, + }; + try { + return response.ok({ + body: await context.ml!.mlClient.callAsInternalUser('search', body), + }); + } catch (error) { + return response.customError(wrapError(error)); + } + }) + ); } diff --git a/x-pack/plugins/ml/server/shared_services/providers/system.ts b/x-pack/plugins/ml/server/shared_services/providers/system.ts index ec2662014546e..d292abc438a2f 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/system.ts +++ 
b/x-pack/plugins/ml/server/shared_services/providers/system.ts @@ -37,7 +37,7 @@ export function getMlSystemProvider( return { mlSystemProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { // const hasMlCapabilities = getHasMlCapabilities(request); - const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; + const { callAsInternalUser } = mlClusterClient; return { async mlCapabilities() { isMinimumLicense(); @@ -77,7 +77,7 @@ export function getMlSystemProvider( // integration and currently alerting does not supply a request object. // await hasMlCapabilities(['canAccessML']); - return callAsCurrentUser('search', { + return callAsInternalUser('search', { ...searchParams, index: ML_RESULTS_INDEX_PATTERN, }); diff --git a/x-pack/plugins/security_solution/public/cases/components/all_cases/columns.tsx b/x-pack/plugins/security_solution/public/cases/components/all_cases/columns.tsx index 162966a2df28a..5c6c72477bf1f 100644 --- a/x-pack/plugins/security_solution/public/cases/components/all_cases/columns.tsx +++ b/x-pack/plugins/security_solution/public/cases/components/all_cases/columns.tsx @@ -6,6 +6,7 @@ import React, { useCallback } from 'react'; import { EuiAvatar, + EuiBadgeGroup, EuiBadge, EuiLink, EuiTableActionsColumnType, @@ -19,7 +20,6 @@ import { getEmptyTagValue } from '../../../common/components/empty_value'; import { Case } from '../../containers/types'; import { FormattedRelativePreferenceDate } from '../../../common/components/formatted_date'; import { CaseDetailsLink } from '../../../common/components/links'; -import { TruncatableText } from '../../../common/components/truncatable_text'; import * as i18n from './translations'; export type CasesColumns = @@ -35,6 +35,10 @@ const Spacer = styled.span` margin-left: ${({ theme }) => theme.eui.paddingSizes.s}; `; +const TagWrapper = styled(EuiBadgeGroup)` + width: 100%; +`; + const renderStringField = (field: string, dataTestSubj: string) => field != null ? 
{field} : getEmptyTagValue(); @@ -96,7 +100,7 @@ export const getCasesColumns = ( render: (tags: Case['tags']) => { if (tags != null && tags.length > 0) { return ( - + {tags.map((tag: string, i: number) => ( ))} - + ); } return getEmptyTagValue(); diff --git a/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx b/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx index 278b972ada970..e1d7d98ba8c51 100644 --- a/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx +++ b/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx @@ -119,10 +119,16 @@ describe('CaseView ', () => { ); expect( wrapper - .find(`[data-test-subj="case-view-tag-list"] [data-test-subj="case-tag"]`) + .find(`[data-test-subj="case-view-tag-list"] [data-test-subj="case-tag-coke"]`) .first() .text() ).toEqual(data.tags[0]); + expect( + wrapper + .find(`[data-test-subj="case-view-tag-list"] [data-test-subj="case-tag-pepsi"]`) + .first() + .text() + ).toEqual(data.tags[1]); expect(wrapper.find(`[data-test-subj="case-view-username"]`).first().text()).toEqual( data.createdBy.username ); diff --git a/x-pack/plugins/security_solution/public/cases/components/tag_list/index.test.tsx b/x-pack/plugins/security_solution/public/cases/components/tag_list/index.test.tsx index 939ddfde8b9dc..7c3fcde687033 100644 --- a/x-pack/plugins/security_solution/public/cases/components/tag_list/index.test.tsx +++ b/x-pack/plugins/security_solution/public/cases/components/tag_list/index.test.tsx @@ -102,14 +102,14 @@ describe('TagList ', () => { ); - expect(wrapper.find(`[data-test-subj="case-tag"]`).last().exists()).toBeTruthy(); + expect(wrapper.find(`[data-test-subj="case-tag-pepsi"]`).last().exists()).toBeTruthy(); wrapper.find(`[data-test-subj="tag-list-edit-button"]`).last().simulate('click'); await act(async () => { - expect(wrapper.find(`[data-test-subj="case-tag"]`).last().exists()).toBeFalsy(); + expect(wrapper.find(`[data-test-subj="case-tag-pepsi"]`).last().exists()).toBeFalsy(); wrapper.find(`[data-test-subj="edit-tags-cancel"]`).last().simulate('click'); await waitFor(() => { wrapper.update(); - expect(wrapper.find(`[data-test-subj="case-tag"]`).last().exists()).toBeTruthy(); + expect(wrapper.find(`[data-test-subj="case-tag-pepsi"]`).last().exists()).toBeTruthy(); }); }); }); diff --git a/x-pack/plugins/security_solution/public/cases/components/tag_list/index.tsx b/x-pack/plugins/security_solution/public/cases/components/tag_list/index.tsx index 7bb10c743a418..b5af1934f379c 100644 --- a/x-pack/plugins/security_solution/public/cases/components/tag_list/index.tsx +++ b/x-pack/plugins/security_solution/public/cases/components/tag_list/index.tsx @@ -10,6 +10,7 @@ import { EuiHorizontalRule, EuiFlexGroup, EuiFlexItem, + EuiBadgeGroup, EuiBadge, EuiButton, EuiButtonEmpty, @@ -98,15 +99,15 @@ export const TagList = React.memo( {tags.length === 0 && !isEditTags &&

{i18n.NO_TAGS}

} - {tags.length > 0 && - !isEditTags && - tags.map((tag, key) => ( - - + + {tags.length > 0 && + !isEditTags && + tags.map((tag, key) => ( + {tag} - - ))} + ))} + {isEditTags && ( diff --git a/x-pack/plugins/security_solution/public/cases/components/user_action_tree/helpers.tsx b/x-pack/plugins/security_solution/public/cases/components/user_action_tree/helpers.tsx index a6286693423c8..1401ac2c46528 100644 --- a/x-pack/plugins/security_solution/public/cases/components/user_action_tree/helpers.tsx +++ b/x-pack/plugins/security_solution/public/cases/components/user_action_tree/helpers.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { EuiFlexGroup, EuiFlexItem, EuiBadge, EuiLink } from '@elastic/eui'; +import { EuiFlexGroup, EuiFlexItem, EuiBadgeGroup, EuiBadge, EuiLink } from '@elastic/eui'; import React from 'react'; import { CaseFullExternalService, Connector } from '../../../../../case/common/api'; @@ -50,14 +50,14 @@ const getTagsLabelTitle = (action: CaseUserActions) => ( {action.action === 'add' && i18n.ADDED_FIELD} {action.action === 'delete' && i18n.REMOVED_FIELD} {i18n.TAGS.toLowerCase()} - {action.newValue != null && - action.newValue.split(',').map((tag) => ( - + + {action.newValue != null && + action.newValue.split(',').map((tag) => ( {tag} - - ))} + ))} + ); diff --git a/x-pack/plugins/security_solution/public/common/components/autocomplete/operators.ts b/x-pack/plugins/security_solution/public/common/components/autocomplete/operators.ts index a81d8cde94e34..c54f58a3fd4b3 100644 --- a/x-pack/plugins/security_solution/public/common/components/autocomplete/operators.ts +++ b/x-pack/plugins/security_solution/public/common/components/autocomplete/operators.ts @@ -90,3 +90,17 @@ export const EXCEPTION_OPERATORS: OperatorOption[] = [ isInListOperator, isNotInListOperator, ]; + +export const EXCEPTION_OPERATORS_SANS_LISTS: OperatorOption[] = [ + isOperator, + isNotOperator, + isOneOfOperator, + isNotOneOfOperator, + existsOperator, + doesNotExistOperator, +]; + +export const EXCEPTION_OPERATORS_ONLY_LISTS: OperatorOption[] = [ + isInListOperator, + isNotInListOperator, +]; diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx index 2abbaee5187a9..a4fe52eaacf4e 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx @@ -61,6 +61,7 @@ export interface AddExceptionModalBaseProps { export interface AddExceptionModalProps extends AddExceptionModalBaseProps { onCancel: () => void; onConfirm: (didCloseAlert: boolean) => void; + onRuleChange?: () => void; alertStatus?: Status; } @@ -99,6 +100,7 @@ export const AddExceptionModal = memo(function AddExceptionModal({ alertData, onCancel, onConfirm, + onRuleChange, alertStatus, }: AddExceptionModalProps) { const { http } = useKibana().services; @@ -152,6 +154,14 @@ export const AddExceptionModal = memo(function AddExceptionModal({ [setExceptionItemsToAdd] ); + const handleRuleChange = useCallback( + (ruleChanged: boolean): void => { + if (ruleChanged && onRuleChange) { + onRuleChange(); + } + }, + [onRuleChange] + ); const onFetchOrCreateExceptionListError = useCallback( (error: Error) => { setFetchOrCreateListError(true); @@ -163,6 +173,7 @@ export const AddExceptionModal = 
memo(function AddExceptionModal({ ruleId, exceptionListType, onError: onFetchOrCreateExceptionListError, + onSuccess: handleRuleChange, }); const initialExceptionItems = useMemo(() => { @@ -265,8 +276,8 @@ export const AddExceptionModal = memo(function AddExceptionModal({ signalIndexName, ]); - const isSubmitButtonDisabled = useCallback( - () => fetchOrCreateListError || exceptionItemsToAdd.length === 0, + const isSubmitButtonDisabled = useMemo( + () => fetchOrCreateListError || exceptionItemsToAdd.every((item) => item.entries.length === 0), [fetchOrCreateListError, exceptionItemsToAdd] ); @@ -285,9 +296,13 @@ export const AddExceptionModal = memo(function AddExceptionModal({

{i18n.ADD_EXCEPTION_FETCH_ERROR}

)} - {fetchOrCreateListError === false && isLoadingExceptionList === true && ( - - )} + {fetchOrCreateListError === false && + (isLoadingExceptionList || + isIndexPatternLoading || + isSignalIndexLoading || + isSignalIndexPatternLoading) && ( + + )} {fetchOrCreateListError === false && !isSignalIndexLoading && !isSignalIndexPatternLoading && @@ -362,7 +377,7 @@ export const AddExceptionModal = memo(function AddExceptionModal({ {i18n.ADD_EXCEPTION} diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.test.tsx index b845848bd14d8..3dcc3eb5a8329 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.test.tsx @@ -213,7 +213,7 @@ describe('BuilderEntryItem', () => { title: 'logstash-*', fields, }} - showLabel={false} + showLabel={true} listType="detection" addNested={false} onChange={jest.fn()} @@ -245,7 +245,7 @@ describe('BuilderEntryItem', () => { title: 'logstash-*', fields, }} - showLabel={false} + showLabel={true} listType="detection" addNested={false} onChange={jest.fn()} diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.tsx index 736e88ee9fe06..dcc8a0e4fb1ba 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/builder_entry_item.tsx @@ -27,6 +27,7 @@ import { getEntryOnMatchAnyChange, getEntryOnListChange, } from './helpers'; +import { EXCEPTION_OPERATORS_ONLY_LISTS } from '../../autocomplete/operators'; interface EntryItemProps { entry: FormattedBuilderEntry; @@ -35,6 +36,7 @@ interface EntryItemProps { listType: ExceptionListType; addNested: boolean; onChange: (arg: BuilderEntry, i: number) => void; + onlyShowListOperators?: boolean; } export const BuilderEntryItem: React.FC = ({ @@ -44,6 +46,7 @@ export const BuilderEntryItem: React.FC = ({ addNested, showLabel, onChange, + onlyShowListOperators = false, }): JSX.Element => { const handleFieldChange = useCallback( ([newField]: IFieldType[]): void => { @@ -124,11 +127,14 @@ export const BuilderEntryItem: React.FC = ({ ); const renderOperatorInput = (isFirst: boolean): JSX.Element => { - const operatorOptions = getOperatorOptions( - entry, - listType, - entry.field != null && entry.field.type === 'boolean' - ); + const operatorOptions = onlyShowListOperators + ? 
EXCEPTION_OPERATORS_ONLY_LISTS + : getOperatorOptions( + entry, + listType, + entry.field != null && entry.field.type === 'boolean', + isFirst + ); const comboBox = ( void; onChangeExceptionItem: (item: ExceptionsBuilderExceptionItem, index: number) => void; + onlyShowListOperators?: boolean; } export const ExceptionListItemComponent = React.memo( @@ -58,6 +59,7 @@ export const ExceptionListItemComponent = React.memo( andLogicIncluded, onDeleteExceptionItem, onChangeExceptionItem, + onlyShowListOperators = false, }) => { const handleEntryChange = useCallback( (entry: BuilderEntry, entryIndex: number): void => { @@ -169,6 +171,7 @@ export const ExceptionListItemComponent = React.memo( exceptionItemIndex === 0 && index === 0 && item.nested !== 'child' } onChange={handleEntryChange} + onlyShowListOperators={onlyShowListOperators} />
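The operator dropdown now has two paths for trimming its options: `getOperatorOptions` accepts an extra `includeValueListOperators` argument, and when `onlyShowListOperators` is set the entry component skips the helper entirely and renders `EXCEPTION_OPERATORS_ONLY_LISTS`. For a plain (non-nested, non-boolean) field this resolves roughly as the new unit tests describe:

```ts
// entry: a FormattedBuilderEntry for an ordinary keyword-type field.
// The fourth argument toggles the value-list operators on and off.
getOperatorOptions(entry, 'detection', false, true);
// -> EXCEPTION_OPERATORS (includes "is in list" / "is not in list")

getOperatorOptions(entry, 'detection', false, false);
// -> EXCEPTION_OPERATORS_SANS_LISTS (the same set minus the list operators)

// With onlyShowListOperators === true, BuilderEntryItem bypasses the helper
// and offers only EXCEPTION_OPERATORS_ONLY_LISTS.
```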
{getDeleteButton( diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx index 8b74d44f29a18..17c94adf42648 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx @@ -14,32 +14,33 @@ import { getEntryExistsMock } from '../../../../../../lists/common/schemas/types import { getExceptionListItemSchemaMock } from '../../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; import { getListResponseMock } from '../../../../../../lists/common/schemas/response/list_schema.mock'; import { - isOperator, - isOneOfOperator, - isNotOperator, - isNotOneOfOperator, - existsOperator, doesNotExistOperator, - isInListOperator, EXCEPTION_OPERATORS, + EXCEPTION_OPERATORS_SANS_LISTS, + existsOperator, + isInListOperator, + isNotOneOfOperator, + isNotOperator, + isOneOfOperator, + isOperator, } from '../../autocomplete/operators'; -import { FormattedBuilderEntry, BuilderEntry, ExceptionsBuilderExceptionItem } from '../types'; -import { IIndexPattern, IFieldType } from '../../../../../../../../src/plugins/data/common'; -import { EntryNested, Entry } from '../../../../lists_plugin_deps'; +import { BuilderEntry, ExceptionsBuilderExceptionItem, FormattedBuilderEntry } from '../types'; +import { IFieldType, IIndexPattern } from '../../../../../../../../src/plugins/data/common'; +import { Entry, EntryNested } from '../../../../lists_plugin_deps'; import { - getFilteredIndexPatterns, - getFormattedBuilderEntry, - isEntryNested, - getFormattedBuilderEntries, - getUpdatedEntriesOnDelete, getEntryFromOperator, - getOperatorOptions, getEntryOnFieldChange, - getEntryOnOperatorChange, - getEntryOnMatchChange, - getEntryOnMatchAnyChange, getEntryOnListChange, + getEntryOnMatchAnyChange, + getEntryOnMatchChange, + getEntryOnOperatorChange, + getFilteredIndexPatterns, + getFormattedBuilderEntries, + getFormattedBuilderEntry, + getOperatorOptions, + getUpdatedEntriesOnDelete, + isEntryNested, } from './helpers'; import { OperatorOption } from '../../autocomplete/types'; @@ -672,6 +673,18 @@ describe('Exception builder helpers', () => { const expected: OperatorOption[] = [isOperator, existsOperator]; expect(output).toEqual(expected); }); + + test('it returns list operators if specified to', () => { + const payloadItem: FormattedBuilderEntry = getMockBuilderEntry(); + const output = getOperatorOptions(payloadItem, 'detection', false, true); + expect(output).toEqual(EXCEPTION_OPERATORS); + }); + + test('it does not return list operators if specified not to', () => { + const payloadItem: FormattedBuilderEntry = getMockBuilderEntry(); + const output = getOperatorOptions(payloadItem, 'detection', false, false); + expect(output).toEqual(EXCEPTION_OPERATORS_SANS_LISTS); + }); }); describe('#getEntryOnFieldChange', () => { diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.tsx index 2fe2c68941ae6..93bae091885c1 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.tsx @@ -22,6 +22,7 @@ import { existsOperator, isOneOfOperator, EXCEPTION_OPERATORS, + 
EXCEPTION_OPERATORS_SANS_LISTS, } from '../../autocomplete/operators'; import { OperatorOption } from '../../autocomplete/types'; import { @@ -40,7 +41,6 @@ import { getEntryValue, getExceptionOperatorSelect } from '../helpers'; * * @param patterns IIndexPattern containing available fields on rule index * @param item exception item entry - * @param addNested boolean noting whether or not UI is currently * set to add a nested field */ export const getFilteredIndexPatterns = ( @@ -295,12 +295,14 @@ export const getEntryFromOperator = ( * * @param item * @param listType - * + * @param isBoolean + * @param includeValueListOperators whether or not to include the 'is in list' and 'is not in list' operators */ export const getOperatorOptions = ( item: FormattedBuilderEntry, listType: ExceptionListType, - isBoolean: boolean + isBoolean: boolean, + includeValueListOperators = true ): OperatorOption[] => { if (item.nested === 'parent' || item.field == null) { return [isOperator]; @@ -309,7 +311,11 @@ export const getOperatorOptions = ( } else if (item.nested != null && listType === 'detection') { return isBoolean ? [isOperator, existsOperator] : [isOperator, isOneOfOperator, existsOperator]; } else { - return isBoolean ? [isOperator, existsOperator] : EXCEPTION_OPERATORS; + return isBoolean + ? [isOperator, existsOperator] + : includeValueListOperators + ? EXCEPTION_OPERATORS + : EXCEPTION_OPERATORS_SANS_LISTS; } }; @@ -547,3 +553,6 @@ export const getDefaultNestedEmptyEntry = (): EmptyNestedEntry => ({ type: OperatorTypeEnum.NESTED, entries: [], }); + +export const containsValueListEntry = (items: ExceptionsBuilderExceptionItem[]): boolean => + items.some((item) => item.entries.some((entry) => entry.type === OperatorTypeEnum.LIST)); diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx index 141429f152790..1ec49425ce8fd 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx @@ -24,7 +24,11 @@ import { BuilderButtonOptions } from './builder_button_options'; import { getNewExceptionItem, filterExceptionItems } from '../helpers'; import { ExceptionsBuilderExceptionItem, CreateExceptionListItemBuilderSchema } from '../types'; import { State, exceptionsBuilderReducer } from './reducer'; -import { getDefaultEmptyEntry, getDefaultNestedEmptyEntry } from './helpers'; +import { + containsValueListEntry, + getDefaultEmptyEntry, + getDefaultNestedEmptyEntry, +} from './helpers'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import exceptionableFields from '../exceptionable_fields.json'; @@ -44,6 +48,7 @@ const MyButtonsContainer = styled(EuiFlexItem)` const initialState: State = { disableAnd: false, + disableNested: false, disableOr: false, andLogicIncluded: false, addNested: false, @@ -82,12 +87,21 @@ export const ExceptionBuilder = ({ onChange, }: ExceptionBuilderProps) => { const [ - { exceptions, exceptionsToDelete, andLogicIncluded, disableAnd, disableOr, addNested }, + { + exceptions, + exceptionsToDelete, + andLogicIncluded, + disableAnd, + disableNested, + disableOr, + addNested, + }, dispatch, ] = useReducer(exceptionsBuilderReducer(), { ...initialState, disableAnd: isAndDisabled, disableOr: isOrDisabled, + disableNested: isNestedDisabled, }); const setUpdateExceptions = useCallback( @@ -362,6 +376,7 @@ export const 
ExceptionBuilder = ({ isOnlyItem={exceptions.length === 1} onDeleteExceptionItem={handleDeleteExceptionItem} onChangeExceptionItem={handleExceptionItemChange} + onlyShowListOperators={containsValueListEntry(exceptions)} />
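`containsValueListEntry` ties these pieces together: the builder passes `onlyShowListOperators={containsValueListEntry(exceptions)}` to every item, and the reducer derives the same condition to set `disableNested`, so value-list entries cannot be combined with nested entries. A small illustrative check, using a hand-written item shape rather than the real mocks:

```ts
const items = [
  {
    ...getNewExceptionItem({
      listType: 'detection',
      listId: 'my_list_id',
      namespaceType: 'single',
      ruleName: 'My rule',
    }),
    entries: [
      // OperatorTypeEnum.LIST === 'list'
      { field: 'source.ip', operator: 'included', type: 'list', list: { id: 'ip_list_id', type: 'ip' } },
    ],
  },
] as ExceptionsBuilderExceptionItem[];

containsValueListEntry(items); // => true: list-only operators, nested entries disabled
```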
@@ -379,7 +394,7 @@ export const ExceptionBuilder = ({ (state: State, action: Action): St const isAndDisabled = lastEntry != null && lastEntry.type === 'nested' && lastEntry.entries.length === 0; const isOrDisabled = lastEntry != null && lastEntry.type === 'nested'; + const containsValueList = action.exceptions.some( + ({ entries }) => entries.filter(({ type }) => type === OperatorTypeEnum.LIST).length > 0 + ); return { ...state, @@ -67,6 +71,7 @@ export const exceptionsBuilderReducer = () => (state: State, action: Action): St addNested: isAddNested, disableAnd: isAndDisabled, disableOr: isOrDisabled, + disableNested: containsValueList, }; } case 'setDefault': { diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx index 4ad077edf66ff..47c3498cb6ab4 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { memo, useState, useCallback, useEffect } from 'react'; +import React, { memo, useState, useCallback, useEffect, useMemo } from 'react'; import styled, { css } from 'styled-components'; import { EuiModal, @@ -146,6 +146,11 @@ export const EditExceptionModal = memo(function EditExceptionModal({ } }, [shouldDisableBulkClose]); + const isSubmitButtonDisabled = useMemo( + () => exceptionItemsToAdd.every((item) => item.entries.length === 0), + [exceptionItemsToAdd] + ); + const handleBuilderOnChange = useCallback( ({ exceptionItems, @@ -261,7 +266,12 @@ export const EditExceptionModal = memo(function EditExceptionModal({ {i18n.CANCEL} - + {i18n.EDIT_EXCEPTION_SAVE_BUTTON} diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx index 384badefc34aa..a54f20f56d56f 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx @@ -383,6 +383,7 @@ export const defaultEndpointExceptionItems = ( fieldName: 'file.Ext.code_signature.trusted', }); const [sha1Hash] = getMappedNonEcsValue({ data: alertData, fieldName: 'file.hash.sha1' }); + const [eventCode] = getMappedNonEcsValue({ data: alertData, fieldName: 'event.code' }); const namespaceType = 'agnostic'; return [ @@ -390,49 +391,40 @@ export const defaultEndpointExceptionItems = ( ...getNewExceptionItem({ listType, listId, namespaceType, ruleName }), entries: [ { - field: 'file.path', - operator: 'included', - type: 'match', - value: filePath ?? '', - }, - ], - }, - { - ...getNewExceptionItem({ listType, listId, namespaceType, ruleName }), - entries: [ - { - field: 'file.Ext.code_signature.subject_name', - operator: 'included', - type: 'match', - value: signatureSigner ?? '', + field: 'file.Ext.code_signature', + type: 'nested', + entries: [ + { + field: 'subject_name', + operator: 'included', + type: 'match', + value: signatureSigner ?? '', + }, + { + field: 'trusted', + operator: 'included', + type: 'match', + value: signatureTrusted ?? '', + }, + ], }, { - field: 'file.Ext.code_signature.trusted', + field: 'file.path', operator: 'included', type: 'match', - value: signatureTrusted ?? 
'', + value: filePath ?? '', }, - ], - }, - { - ...getNewExceptionItem({ listType, listId, namespaceType, ruleName }), - entries: [ { field: 'file.hash.sha1', operator: 'included', type: 'match', value: sha1Hash ?? '', }, - ], - }, - { - ...getNewExceptionItem({ listType, listId, namespaceType, ruleName }), - entries: [ { - field: 'event.category', + field: 'event.code', operator: 'included', - type: 'match_any', - value: getMappedNonEcsValue({ data: alertData, fieldName: 'event.category' }), + type: 'match', + value: eventCode ?? '', }, ], }, diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.test.tsx index 7bef771d367f3..6dbf5922e0a97 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.test.tsx @@ -38,6 +38,7 @@ describe('useFetchOrCreateRuleExceptionList', () => { ReturnUseFetchOrCreateRuleExceptionList >; const onError = jest.fn(); + const onSuccess = jest.fn(); const error = new Error('Something went wrong'); const ruleId = 'myRuleId'; const abortCtrl = new AbortController(); @@ -94,6 +95,7 @@ describe('useFetchOrCreateRuleExceptionList', () => { ruleId, exceptionListType: listType, onError, + onSuccess, }) ); }); @@ -168,6 +170,15 @@ describe('useFetchOrCreateRuleExceptionList', () => { expect(patchRule).toHaveBeenCalledTimes(1); }); }); + it('invokes onSuccess indicating that the rule changed', async () => { + await act(async () => { + const { waitForNextUpdate } = render(); + await waitForNextUpdate(); + await waitForNextUpdate(); + await waitForNextUpdate(); + expect(onSuccess).toHaveBeenCalledWith(true); + }); + }); }); describe("when the rule has exception list references and 'detection' is passed in", () => { @@ -207,6 +218,15 @@ describe('useFetchOrCreateRuleExceptionList', () => { expect(result.current[1]).toEqual(detectionExceptionList); }); }); + it('invokes onSuccess indicating that the rule did not change', async () => { + await act(async () => { + const { waitForNextUpdate } = render(); + await waitForNextUpdate(); + await waitForNextUpdate(); + await waitForNextUpdate(); + expect(onSuccess).toHaveBeenCalledWith(false); + }); + }); describe("but the rule does not have a reference to 'detection' type exception list", () => { beforeEach(() => { @@ -362,5 +382,14 @@ describe('useFetchOrCreateRuleExceptionList', () => { expect(onError).toHaveBeenCalledWith(error); }); }); + + it('does not call onSuccess', async () => { + await act(async () => { + const { waitForNextUpdate } = render(); + await waitForNextUpdate(); + await waitForNextUpdate(); + expect(onSuccess).not.toHaveBeenCalled(); + }); + }); }); }); diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx index b238e25f6de59..2a5ef7b21b519 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx @@ -31,6 +31,7 @@ export interface UseFetchOrCreateRuleExceptionListProps { 
ruleId: Rule['id']; exceptionListType: ExceptionListSchema['type']; onError: (arg: Error) => void; + onSuccess?: (ruleWasChanged: boolean) => void; } /** @@ -47,6 +48,7 @@ export const useFetchOrCreateRuleExceptionList = ({ ruleId, exceptionListType, onError, + onSuccess, }: UseFetchOrCreateRuleExceptionListProps): ReturnUseFetchOrCreateRuleExceptionList => { const [isLoading, setIsLoading] = useState(false); const [exceptionList, setExceptionList] = useState(null); @@ -168,6 +170,9 @@ export const useFetchOrCreateRuleExceptionList = ({ if (isSubscribed) { setExceptionList(exceptionListToUse); setIsLoading(false); + if (onSuccess) { + onSuccess(matchingList == null); + } } } catch (error) { if (isSubscribed) { @@ -183,7 +188,7 @@ export const useFetchOrCreateRuleExceptionList = ({ isSubscribed = false; abortCtrl.abort(); }; - }, [http, ruleId, exceptionListType, onError]); + }, [http, ruleId, exceptionListType, onError, onSuccess]); return [isLoading, exceptionList]; }; diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/index.tsx index 9cc73d4491146..34dc47b9cd411 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/index.tsx @@ -57,6 +57,7 @@ interface ExceptionsViewerProps { exceptionListsMeta: ExceptionIdentifiers[]; availableListTypes: ExceptionListTypeEnum[]; commentsAccordionId: string; + onRuleChange?: () => void; } const ExceptionsViewerComponent = ({ @@ -66,6 +67,7 @@ const ExceptionsViewerComponent = ({ exceptionListsMeta, availableListTypes, commentsAccordionId, + onRuleChange, }: ExceptionsViewerProps): JSX.Element => { const { services } = useKibana(); const [, dispatchToaster] = useStateToaster(); @@ -275,6 +277,7 @@ const ExceptionsViewerComponent = ({ exceptionListType={exceptionListTypeToEdit} onCancel={handleOnCancelExceptionModal} onConfirm={handleOnConfirmExceptionModal} + onRuleChange={onRuleChange} /> )} diff --git a/x-pack/plugins/security_solution/public/common/components/help_menu/index.tsx b/x-pack/plugins/security_solution/public/common/components/help_menu/index.tsx index f4477740f7b58..1eaa16fd058a5 100644 --- a/x-pack/plugins/security_solution/public/common/components/help_menu/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/help_menu/index.tsx @@ -39,7 +39,7 @@ export const HelpMenu = React.memo(() => { }, { linkType: 'discuss', - href: 'https://discuss.elastic.co/c/siem', + href: 'https://discuss.elastic.co/c/security', target: '_blank', rel: 'noopener', }, diff --git a/x-pack/plugins/security_solution/public/common/components/news_feed/helpers.test.ts b/x-pack/plugins/security_solution/public/common/components/news_feed/helpers.test.ts index cdd04b50a6d50..35a59f4d18e8b 100644 --- a/x-pack/plugins/security_solution/public/common/components/news_feed/helpers.test.ts +++ b/x-pack/plugins/security_solution/public/common/components/news_feed/helpers.test.ts @@ -144,7 +144,7 @@ describe('helpers', () => { hash: '5a35c984a9cdc1c6a25913f3d0b99b1aefc7257bc3b936c39db9fa0435edeed0', imageUrl: 'https://aws1.discourse-cdn.com/elastic/original/3X/f/8/f8c3d0b9971cfcd0be349d973aa5799f71d280cc.png?blade=securitysolutionfeed', - linkUrl: 'https://discuss.elastic.co/c/siem?blade=securitysolutionfeed', + linkUrl: 'https://discuss.elastic.co/c/security?blade=securitysolutionfeed', publishOn: 
expect.any(Date), title: 'Got SIEM Questions?', }, @@ -284,7 +284,7 @@ describe('helpers', () => { }, link_text: null, link_url: { - en: 'https://discuss.elastic.co/c/siem?blade=securitysolutionfeed', + en: 'https://discuss.elastic.co/c/security?blade=securitysolutionfeed', ja: translatedLinkUrl, }, languages: null, diff --git a/x-pack/plugins/security_solution/public/common/mock/news.ts b/x-pack/plugins/security_solution/public/common/mock/news.ts index 3e421ce19ae9c..51449347e649a 100644 --- a/x-pack/plugins/security_solution/public/common/mock/news.ts +++ b/x-pack/plugins/security_solution/public/common/mock/news.ts @@ -16,7 +16,7 @@ export const rawNewsApiResponse: RawNewsApiResponse = { "There's an awesome community of Elastic SIEM users out there. Join the discussion about configuring, learning, and using the Elastic SIEM app, and detecting threats!", }, link_text: null, - link_url: { en: 'https://discuss.elastic.co/c/siem?blade=securitysolutionfeed' }, + link_url: { en: 'https://discuss.elastic.co/c/security?blade=securitysolutionfeed' }, languages: null, badge: { en: '7.6' }, image_url: { diff --git a/x-pack/plugins/security_solution/public/common/mock/raw_news.ts b/x-pack/plugins/security_solution/public/common/mock/raw_news.ts index 85bef15a41b23..9cd06ed107956 100644 --- a/x-pack/plugins/security_solution/public/common/mock/raw_news.ts +++ b/x-pack/plugins/security_solution/public/common/mock/raw_news.ts @@ -17,7 +17,7 @@ export const rawNewsJSON = ` }, "link_text":null, "link_url":{ - "en":"https://discuss.elastic.co/c/siem?blade=securitysolutionfeed" + "en":"https://discuss.elastic.co/c/security?blade=securitysolutionfeed" }, "languages":null, "badge":{ diff --git a/x-pack/plugins/security_solution/public/detections/components/alerts_table/default_config.tsx b/x-pack/plugins/security_solution/public/detections/components/alerts_table/default_config.tsx index a4ce6c0200eb3..010129d2d4593 100644 --- a/x-pack/plugins/security_solution/public/detections/components/alerts_table/default_config.tsx +++ b/x-pack/plugins/security_solution/public/detections/components/alerts_table/default_config.tsx @@ -204,6 +204,7 @@ export const requiredFieldsForActions = [ 'file.Ext.code_signature.trusted', 'file.hash.sha1', 'host.os.family', + 'event.code', ]; interface AlertActionArgs { diff --git a/x-pack/plugins/security_solution/public/detections/components/rules/step_rule_actions/index.tsx b/x-pack/plugins/security_solution/public/detections/components/rules/step_rule_actions/index.tsx index 2b842515d0b71..5b4f7677dbc30 100644 --- a/x-pack/plugins/security_solution/public/detections/components/rules/step_rule_actions/index.tsx +++ b/x-pack/plugins/security_solution/public/detections/components/rules/step_rule_actions/index.tsx @@ -15,6 +15,7 @@ import { import { findIndex } from 'lodash/fp'; import React, { FC, memo, useCallback, useEffect, useMemo, useState } from 'react'; +import { ActionVariable } from '../../../../../../triggers_actions_ui/public'; import { RuleStep, RuleStepProps, @@ -36,7 +37,7 @@ import { APP_ID } from '../../../../../common/constants'; interface StepRuleActionsProps extends RuleStepProps { defaultValues?: ActionsStepRule | null; - actionMessageParams: string[]; + actionMessageParams: ActionVariable[]; } const stepActionsDefaultValue = { diff --git a/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/api.ts b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/api.ts index 66be5397c72c1..08d564230b85f 
100644 --- a/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/api.ts +++ b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/api.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { HttpStart } from '../../../../../../../../src/core/public'; import { DETECTION_ENGINE_RULES_URL, DETECTION_ENGINE_PREPACKAGED_URL, @@ -126,7 +127,23 @@ export const fetchRules = async ({ * @throws An error if response is not OK */ export const fetchRuleById = async ({ id, signal }: FetchRuleProps): Promise => - KibanaServices.get().http.fetch(DETECTION_ENGINE_RULES_URL, { + pureFetchRuleById({ id, http: KibanaServices.get().http, signal }); + +/** + * Fetch a Rule by providing a Rule ID + * + * @param id Rule ID's (not rule_id) + * @param http Kibana http service + * @param signal to cancel request + * + * @throws An error if response is not OK + */ +export const pureFetchRuleById = async ({ + id, + http, + signal, +}: FetchRuleProps & { http: HttpStart }): Promise => + http.fetch(DETECTION_ENGINE_RULES_URL, { method: 'GET', query: { id }, signal, diff --git a/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/use_rule_async.tsx b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/use_rule_async.tsx new file mode 100644 index 0000000000000..fbca46097dcd9 --- /dev/null +++ b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/use_rule_async.tsx @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useEffect, useCallback } from 'react'; + +import { useAsync, withOptionalSignal } from '../../../../shared_imports'; +import { useHttp } from '../../../../common/lib/kibana'; +import { useAppToasts } from '../../../../common/hooks/use_app_toasts'; +import { pureFetchRuleById } from './api'; +import { Rule } from './types'; +import * as i18n from './translations'; + +export interface UseRuleAsync { + error: unknown; + loading: boolean; + refresh: () => void; + rule: Rule | null; +} + +const _fetchRule = withOptionalSignal(pureFetchRuleById); +const _useRuleAsync = () => useAsync(_fetchRule); + +export const useRuleAsync = (ruleId: string): UseRuleAsync => { + const { start, loading, result, error } = _useRuleAsync(); + const http = useHttp(); + const { addError } = useAppToasts(); + + const fetch = useCallback(() => { + start({ id: ruleId, http }); + }, [http, ruleId, start]); + + // initial fetch + useEffect(() => { + fetch(); + }, [fetch]); + + // toast on error + useEffect(() => { + if (error != null) { + addError(error, { title: i18n.RULE_AND_TIMELINE_FETCH_FAILURE }); + } + }, [addError, error]); + + return { error, loading, refresh: fetch, rule: result ?? 
null }; +}; diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts index 10d969ae7e6e8..14cf476e66563 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/all/__mocks__/mock.ts @@ -6,6 +6,7 @@ import { esFilters } from '../../../../../../../../../../src/plugins/data/public'; import { Rule, RuleError } from '../../../../../containers/detection_engine/rules'; +import { List } from '../../../../../../../common/detection_engine/schemas/types'; import { AboutStepRule, ActionsStepRule, DefineStepRule, ScheduleStepRule } from '../../types'; import { FieldValueQueryBar } from '../../../../../components/rules/query_bar'; @@ -240,3 +241,9 @@ export const mockRules: Rule[] = [ mockRule('abe6c564-050d-45a5-aaf0-386c37dd1f61'), mockRule('63f06f34-c181-4b2d-af35-f2ace572a1ee'), ]; + +export const mockExceptionsList: List = { + namespace_type: 'single', + id: '75cd4380-cc5e-11ea-9101-5b34f44aeb44', + type: 'detection', +}; diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts index 745518b90df00..6458d2faa2468 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.test.ts @@ -4,7 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { List } from '../../../../../../common/detection_engine/schemas/types'; +import { ENDPOINT_LIST_ID } from '../../../../../shared_imports'; import { NewRule } from '../../../../containers/detection_engine/rules'; + import { DefineStepRuleJson, ScheduleStepRuleJson, @@ -26,12 +29,19 @@ import { } from './helpers'; import { mockDefineStepRule, + mockExceptionsList, mockQueryBar, mockScheduleStepRule, mockAboutStepRule, mockActionsStepRule, } from '../all/__mocks__/mock'; +const ENDPOINT_LIST = { + id: ENDPOINT_LIST_ID, + namespace_type: 'agnostic', + type: 'endpoint', +} as List; + describe('helpers', () => { describe('getTimeTypeValue', () => { test('returns timeObj with value 0 if no time value found', () => { @@ -373,6 +383,53 @@ describe('helpers', () => { expect(result).toEqual(expected); }); + test('returns formatted object with endpoint exceptions_list', () => { + const result: AboutStepRuleJson = formatAboutStepData( + { + ...mockData, + isAssociatedToEndpointList: true, + }, + [] + ); + expect(result.exceptions_list).toEqual([ + { id: ENDPOINT_LIST_ID, namespace_type: 'agnostic', type: 'endpoint' }, + ]); + }); + + test('returns formatted object with detections exceptions_list', () => { + const result: AboutStepRuleJson = formatAboutStepData(mockData, [mockExceptionsList]); + expect(result.exceptions_list).toEqual([mockExceptionsList]); + }); + + test('returns formatted object with both exceptions_lists', () => { + const result: AboutStepRuleJson = formatAboutStepData( + { + ...mockData, + isAssociatedToEndpointList: true, + }, + [mockExceptionsList] + ); + expect(result.exceptions_list).toEqual([ENDPOINT_LIST, mockExceptionsList]); + }); + + test('returns formatted object with pre-existing exceptions lists', () => { + const exceptionsLists: List[] = [ENDPOINT_LIST, mockExceptionsList]; + const result: AboutStepRuleJson = formatAboutStepData( + { + ...mockData, + isAssociatedToEndpointList: true, + }, + exceptionsLists + ); + expect(result.exceptions_list).toEqual(exceptionsLists); + }); + + test('returns formatted object with pre-existing endpoint exceptions list disabled', () => { + const exceptionsLists: List[] = [ENDPOINT_LIST, mockExceptionsList]; + const result: AboutStepRuleJson = formatAboutStepData(mockData, exceptionsLists); + expect(result.exceptions_list).toEqual([mockExceptionsList]); + }); + test('returns formatted object with empty falsePositive and references filtered out', () => { const mockStepData = { ...mockData, diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts index 38f7836f678f9..a972afbd8c0c5 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/create/helpers.ts @@ -12,8 +12,9 @@ import { NOTIFICATION_THROTTLE_NO_ACTIONS } from '../../../../../../common/const import { transformAlertToRuleAction } from '../../../../../../common/detection_engine/transform_actions'; import { RuleType } from '../../../../../../common/detection_engine/types'; import { isMlRule } from '../../../../../../common/machine_learning/helpers'; +import { List } from '../../../../../../common/detection_engine/schemas/types'; import { ENDPOINT_LIST_ID } from '../../../../../shared_imports'; -import { NewRule } from '../../../../containers/detection_engine/rules'; +import { NewRule, 
Rule } from '../../../../containers/detection_engine/rules'; import { AboutStepRule, @@ -146,7 +147,10 @@ export const formatScheduleStepData = (scheduleData: ScheduleStepRule): Schedule }; }; -export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRuleJson => { +export const formatAboutStepData = ( + aboutStepData: AboutStepRule, + exceptionsList?: List[] +): AboutStepRuleJson => { const { author, falsePositives, @@ -162,6 +166,10 @@ export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRule timestampOverride, ...rest } = aboutStepData; + + const detectionExceptionLists = + exceptionsList != null ? exceptionsList.filter((list) => list.type !== 'endpoint') : []; + const resp = { author: author.filter((item) => !isEmpty(item)), ...(isBuildingBlock ? { building_block_type: 'default' } : {}), @@ -169,8 +177,13 @@ export const formatAboutStepData = (aboutStepData: AboutStepRule): AboutStepRule ? { exceptions_list: [ { id: ENDPOINT_LIST_ID, namespace_type: 'agnostic', type: 'endpoint' }, + ...detectionExceptionLists, ] as AboutStepRuleJson['exceptions_list'], } + : exceptionsList != null + ? { + exceptions_list: [...detectionExceptionLists], + } : {}), false_positives: falsePositives.filter((item) => !isEmpty(item)), references: references.filter((item) => !isEmpty(item)), @@ -218,11 +231,12 @@ export const formatRule = ( defineStepData: DefineStepRule, aboutStepData: AboutStepRule, scheduleData: ScheduleStepRule, - actionsData: ActionsStepRule + actionsData: ActionsStepRule, + rule?: Rule | null ): NewRule => deepmerge.all([ formatDefineStepData(defineStepData), - formatAboutStepData(aboutStepData), + formatAboutStepData(aboutStepData, rule?.exceptions_list), formatScheduleStepData(scheduleData), formatActionsStepData(actionsData), ]) as NewRule; diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/details/index.tsx b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/details/index.tsx index 5832f07134936..9c130a7d351fa 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/details/index.tsx +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/details/index.tsx @@ -37,7 +37,7 @@ import { } from '../../../../../common/components/link_to/redirect_to_detection_engine'; import { SiemSearchBar } from '../../../../../common/components/search_bar'; import { WrapperPage } from '../../../../../common/components/wrapper_page'; -import { useRule, Rule } from '../../../../containers/detection_engine/rules'; +import { Rule } from '../../../../containers/detection_engine/rules'; import { useListsConfig } from '../../../../containers/detection_engine/lists/use_lists_config'; import { useWithSource } from '../../../../../common/containers/source'; @@ -84,7 +84,7 @@ import { ExceptionsViewer } from '../../../../../common/components/exceptions/vi import { DEFAULT_INDEX_PATTERN, FILTERS_GLOBAL_HEIGHT } from '../../../../../../common/constants'; import { useFullScreen } from '../../../../../common/containers/use_full_screen'; import { Display } from '../../../../../hosts/pages/display'; -import { ExceptionListTypeEnum, ExceptionIdentifiers } from '../../../../../lists_plugin_deps'; +import { ExceptionListTypeEnum, ExceptionIdentifiers } from '../../../../../shared_imports'; import { getEventsViewerBodyHeight, MIN_EVENTS_VIEWER_BODY_HEIGHT, @@ -92,6 +92,7 @@ import { import { footerHeight } from 
'../../../../../timelines/components/timeline/footer'; import { isMlRule } from '../../../../../../common/machine_learning/helpers'; import { isThresholdRule } from '../../../../../../common/detection_engine/utils'; +import { useRuleAsync } from '../../../../containers/detection_engine/rules/use_rule_async'; import { showGlobalFilters } from '../../../../../timelines/components/timeline/helpers'; import { timelineSelectors } from '../../../../../timelines/store/timeline'; import { timelineDefaults } from '../../../../../timelines/store/timeline/defaults'; @@ -146,7 +147,9 @@ export const RuleDetailsPageComponent: FC = ({ } = useListsConfig(); const loading = userInfoLoading || listsConfigLoading; const { detailName: ruleId } = useParams(); - const [isLoading, rule] = useRule(ruleId); + const { rule: maybeRule, refresh: refreshRule, loading: ruleLoading } = useRuleAsync(ruleId); + const [rule, setRule] = useState(null); + const isLoading = ruleLoading && rule == null; // This is used to re-trigger api rule status when user de/activate rule const [ruleEnabled, setRuleEnabled] = useState(null); const [ruleDetailTab, setRuleDetailTab] = useState(RuleDetailTabs.alerts); @@ -172,10 +175,17 @@ export const RuleDetailsPageComponent: FC = ({ mlCapabilities.isPlatinumOrTrialLicense && hasMlAdminPermissions(mlCapabilities); const ruleDetailTabs = getRuleDetailsTabs(rule); - const title = isLoading === true || rule === null ? : rule.name; + // persist rule until refresh is complete + useEffect(() => { + if (maybeRule != null) { + setRule(maybeRule); + } + }, [maybeRule]); + + const title = rule?.name ?? ; const subTitle = useMemo( () => - isLoading === true || rule === null ? ( + rule == null ? ( ) : ( [ @@ -211,7 +221,7 @@ export const RuleDetailsPageComponent: FC = ({ ), ] ), - [isLoading, rule] + [rule] ); // Set showBuildingBlockAlerts if rule is a Building Block Rule otherwise we won't show alerts @@ -524,6 +534,7 @@ export const RuleDetailsPageComponent: FC = ({ availableListTypes={exceptionLists.allowedExceptionListTypes} commentsAccordionId={'ruleDetailsTabExceptions'} exceptionListsMeta={exceptionLists.lists} + onRuleChange={refreshRule} /> )} {ruleDetailTab === RuleDetailTabs.failures && } diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx index 0900cdb8f4789..3cc874b85ecf3 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/edit/index.tsx @@ -273,7 +273,8 @@ const EditRulePageComponent: FC = () => { : myScheduleRuleForm.data) as ScheduleStepRule, (activeFormId === RuleStep.ruleActions ? activeForm.data - : myActionsRuleForm.data) as ActionsStepRule + : myActionsRuleForm.data) as ActionsStepRule, + rule ), ...(ruleId ? 
{ id: ruleId } : {}), }); diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/helpers.tsx b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/helpers.tsx index 11b779e71b9b2..8f8967f2ff6d5 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/helpers.tsx +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/helpers.tsx @@ -9,6 +9,7 @@ import moment from 'moment'; import memoizeOne from 'memoize-one'; import { useLocation } from 'react-router-dom'; +import { ActionVariable } from '../../../../../../triggers_actions_ui/public'; import { RuleAlertAction, RuleType } from '../../../../../common/detection_engine/types'; import { isMlRule } from '../../../../../common/machine_learning/helpers'; import { transformRuleToAlertAction } from '../../../../../common/detection_engine/transform_actions'; @@ -326,18 +327,23 @@ export const getActionMessageRuleParams = (ruleType: RuleType): string[] => { return ruleParamsKeys; }; -export const getActionMessageParams = memoizeOne((ruleType: RuleType | undefined): string[] => { - if (!ruleType) { - return []; +export const getActionMessageParams = memoizeOne( + (ruleType: RuleType | undefined): ActionVariable[] => { + if (!ruleType) { + return []; + } + const actionMessageRuleParams = getActionMessageRuleParams(ruleType); + + return [ + { name: 'state.signals_count', description: 'state.signals_count' }, + { name: '{context.results_link}', description: 'context.results_link' }, + ...actionMessageRuleParams.map((param) => { + const extendedParam = `context.rule.${param}`; + return { name: extendedParam, description: extendedParam }; + }), + ]; } - const actionMessageRuleParams = getActionMessageRuleParams(ruleType); - - return [ - 'state.signals_count', - '{context.results_link}', - ...actionMessageRuleParams.map((param) => `context.rule.${param}`), - ]; -}); +); // typed as null not undefined as the initial state for this value is null. export const userHasNoPermissions = (canUserCRUD: boolean | null): boolean => diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.test.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.test.ts index 32f96b519acc5..1cbd1ee0f76ae 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.test.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.test.ts @@ -23,6 +23,6 @@ describe('getBreadcrumbs', () => { [], getUrlForAppMock ) - ).toEqual([{ href: 'securitySolution:detections', text: 'Detection alerts' }]); + ).toEqual([{ href: 'securitySolution:detections', text: 'Detections' }]); }); }); diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.ts index 75d1df9406d25..c1b4fa3e2b7d9 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/rules/utils.ts @@ -57,7 +57,7 @@ export const getBreadcrumbs = ( ): ChromeBreadcrumb[] => { let breadcrumb = [ { - text: i18nDetections.PAGE_TITLE, + text: i18nDetections.BREADCRUMB_TITLE, href: getUrlForApp(`${APP_ID}:${SecurityPageName.detections}`, { path: !isEmpty(search[0]) ? 
search[0] : '', }), diff --git a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/translations.ts b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/translations.ts index 92dc02ac8478c..10223716ef331 100644 --- a/x-pack/plugins/security_solution/public/detections/pages/detection_engine/translations.ts +++ b/x-pack/plugins/security_solution/public/detections/pages/detection_engine/translations.ts @@ -6,6 +6,13 @@ import { i18n } from '@kbn/i18n'; +export const BREADCRUMB_TITLE = i18n.translate( + 'xpack.securitySolution.detectionEngine.detectionsBreadcrumbTitle', + { + defaultMessage: 'Detections', + } +); + export const PAGE_TITLE = i18n.translate( 'xpack.securitySolution.detectionEngine.detectionsPageTitle', { diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.test.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.test.tsx index 8612b15f89857..4f7c14735fe21 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.test.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.test.tsx @@ -232,7 +232,7 @@ describe('Policy Details', () => { ); expect(warningCallout).toHaveLength(1); expect(warningCallout.text()).toEqual( - 'This action will update 5 hostsSaving these changes will apply the updates to all active endpoints assigned to this policy' + 'This action will update 5 hostsSaving these changes will apply updates to all endpoints assigned to this policy' ); }); it('should close dialog if cancel button is clicked', () => { diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.tsx index 9576e1aedcaf1..288bc484c23b5 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_details.tsx @@ -306,7 +306,7 @@ const ConfirmUpdate = React.memo<{ > diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/config_form.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/config_form.tsx index 763931bc2d3d7..8e3c4138efb36 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/config_form.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/config_form.tsx @@ -34,17 +34,10 @@ export const ConfigForm: React.FC<{ */ supportedOss: React.ReactNode; children: React.ReactNode; - /** - * A description for the component. - */ - description: string; - /** - * The `data-test-subj` attribute to append to a certain child element. 
- */ dataTestSubj: string; /** React Node to be put on the right corner of the card */ rightCorner: React.ReactNode; -}> = React.memo(({ type, supportedOss, children, dataTestSubj, rightCorner, description }) => { +}> = React.memo(({ type, supportedOss, children, dataTestSubj, rightCorner }) => { const typeTitle = useMemo(() => { return ( @@ -85,12 +78,7 @@ export const ConfigForm: React.FC<{ return ( - + {children} diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/linux.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/linux.tsx index d7bae0d2e6bad..66126adb7a4e1 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/linux.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/linux.tsx @@ -102,12 +102,6 @@ export const LinuxEvents = React.memo(() => { type={i18n.translate('xpack.securitySolution.endpoint.policy.details.eventCollection', { defaultMessage: 'Event Collection', })} - description={i18n.translate( - 'xpack.securitySolution.endpoint.policy.details.eventCollectionLabel', - { - defaultMessage: 'Event Collection', - } - )} supportedOss={i18n.translate('xpack.securitySolution.endpoint.policy.details.linux', { defaultMessage: 'Linux', })} diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/mac.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/mac.tsx index 37709ff608857..dc70fc0ba0f4f 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/mac.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/mac.tsx @@ -102,12 +102,6 @@ export const MacEvents = React.memo(() => { type={i18n.translate('xpack.securitySolution.endpoint.policy.details.eventCollection', { defaultMessage: 'Event Collection', })} - description={i18n.translate( - 'xpack.securitySolution.endpoint.policy.details.eventCollectionLabel', - { - defaultMessage: 'Event Collection', - } - )} supportedOss={i18n.translate('xpack.securitySolution.endpoint.policy.details.mac', { defaultMessage: 'Mac', })} diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/windows.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/windows.tsx index 3c7ecae0d9b4e..5acdf67922a3a 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/windows.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/events/windows.tsx @@ -142,9 +142,6 @@ export const WindowsEvents = React.memo(() => { type={i18n.translate('xpack.securitySolution.endpoint.policy.details.eventCollection', { defaultMessage: 'Event Collection', })} - description={i18n.translate('xpack.securitySolution.endpoint.policy.details.windowsLabel', { - defaultMessage: 'Windows', - })} supportedOss={i18n.translate('xpack.securitySolution.endpoint.policy.details.windows', { defaultMessage: 'Windows', })} diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/protections/malware.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/protections/malware.tsx index 23ac6cc5b813d..dee1e27782e69 100644 --- 
a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/protections/malware.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_forms/protections/malware.tsx @@ -174,9 +174,6 @@ export const MalwareProtections = React.memo(() => { defaultMessage: 'Windows, Mac', })} dataTestSubj="malwareProtectionsForm" - description={i18n.translate('xpack.securitySolution.endpoint.policy.details.malwareLabel', { - defaultMessage: 'Malware', - })} rightCorner={protectionSwitch} > {radioButtons} diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_list.tsx b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_list.tsx index 20b6534f7664e..667aacd9df3bf 100644 --- a/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_list.tsx +++ b/x-pack/plugins/security_solution/public/management/pages/policy/view/policy_list.tsx @@ -322,9 +322,8 @@ export const PolicyList = React.memo(() => { }), render(pkg: Immutable) { return i18n.translate('xpack.securitySolution.endpoint.policyList.versionField', { - defaultMessage: '{title} v{version}', + defaultMessage: 'v{version}', values: { - title: pkg.title, version: pkg.version, }, }); diff --git a/x-pack/plugins/security_solution/public/overview/pages/summary.tsx b/x-pack/plugins/security_solution/public/overview/pages/summary.tsx index 0f20e8bea9dc5..d8260858aa245 100644 --- a/x-pack/plugins/security_solution/public/overview/pages/summary.tsx +++ b/x-pack/plugins/security_solution/public/overview/pages/summary.tsx @@ -71,7 +71,7 @@ export const Summary = React.memo(() => { defaultMessage="If you have input or suggestions regarding your experience with Elastic SIEM, please feel free to {feedback}." 
values={{ feedback: ( - + { const initialTime = new Date('6/1/2020').getTime(); + const oneMillisecond = new Date(initialTime + 1).getTime(); const oneSecond = new Date(initialTime + 1 * second).getTime(); const oneMinute = new Date(initialTime + 1 * minute).getTime(); const oneHour = new Date(initialTime + 1 * hour).getTime(); @@ -25,6 +26,7 @@ describe('date', () => { const oneMonth = new Date(initialTime + 1 * month).getTime(); const oneYear = new Date(initialTime + 1 * year).getTime(); + const almostASecond = new Date(initialTime + 999).getTime(); const almostAMinute = new Date(initialTime + 59.9 * second).getTime(); const almostAnHour = new Date(initialTime + 59.9 * minute).getTime(); const almostADay = new Date(initialTime + 23.9 * hour).getTime(); @@ -34,6 +36,14 @@ describe('date', () => { const threeYears = new Date(initialTime + 3 * year).getTime(); it('should return the correct singular relative time', () => { + expect(getFriendlyElapsedTime(initialTime, initialTime)).toEqual({ + duration: '<1', + durationType: 'millisecond', + }); + expect(getFriendlyElapsedTime(initialTime, oneMillisecond)).toEqual({ + duration: 1, + durationType: 'millisecond', + }); expect(getFriendlyElapsedTime(initialTime, oneSecond)).toEqual({ duration: 1, durationType: 'second', @@ -65,6 +75,10 @@ describe('date', () => { }); it('should return the correct pluralized relative time', () => { + expect(getFriendlyElapsedTime(initialTime, almostASecond)).toEqual({ + duration: 999, + durationType: 'milliseconds', + }); expect(getFriendlyElapsedTime(initialTime, almostAMinute)).toEqual({ duration: 59, durationType: 'seconds', diff --git a/x-pack/plugins/security_solution/public/resolver/lib/date.ts b/x-pack/plugins/security_solution/public/resolver/lib/date.ts index de0f9dcd7efbe..a5e07e6a02a88 100644 --- a/x-pack/plugins/security_solution/public/resolver/lib/date.ts +++ b/x-pack/plugins/security_solution/public/resolver/lib/date.ts @@ -18,7 +18,6 @@ export const getFriendlyElapsedTime = ( const startTime = typeof from === 'number' ? from : parseInt(from, 10); const endTime = typeof to === 'number' ? 
to : parseInt(to, 10); const elapsedTimeInMs = endTime - startTime; - if (Number.isNaN(elapsedTimeInMs)) { return null; } @@ -31,45 +30,50 @@ export const getFriendlyElapsedTime = ( const month = day * 30; const year = day * 365; - let duration: number; + let duration: DurationDetails['duration']; let singularType: DurationTypes; let pluralType: DurationTypes; switch (true) { case elapsedTimeInMs >= year: - duration = elapsedTimeInMs / year; + duration = Math.floor(elapsedTimeInMs / year); singularType = 'year'; pluralType = 'years'; break; case elapsedTimeInMs >= month: - duration = elapsedTimeInMs / month; + duration = Math.floor(elapsedTimeInMs / month); singularType = 'month'; pluralType = 'months'; break; case elapsedTimeInMs >= week: - duration = elapsedTimeInMs / week; + duration = Math.floor(elapsedTimeInMs / week); singularType = 'week'; pluralType = 'weeks'; break; case elapsedTimeInMs >= day: - duration = elapsedTimeInMs / day; + duration = Math.floor(elapsedTimeInMs / day); singularType = 'day'; pluralType = 'days'; break; case elapsedTimeInMs >= hour: - duration = elapsedTimeInMs / hour; + duration = Math.floor(elapsedTimeInMs / hour); singularType = 'hour'; pluralType = 'hours'; break; case elapsedTimeInMs >= minute: - duration = elapsedTimeInMs / minute; + duration = Math.floor(elapsedTimeInMs / minute); singularType = 'minute'; pluralType = 'minutes'; break; case elapsedTimeInMs >= second: - duration = elapsedTimeInMs / second; + duration = Math.floor(elapsedTimeInMs / second); singularType = 'second'; pluralType = 'seconds'; break; + case elapsedTimeInMs === 0: + duration = '<1'; + singularType = 'millisecond'; + pluralType = 'millisecond'; // Would never show + break; default: duration = elapsedTimeInMs; singularType = 'millisecond'; @@ -77,6 +81,6 @@ export const getFriendlyElapsedTime = ( break; } - const durationType = duration > 1 ? pluralType : singularType; - return { duration: Math.floor(duration), durationType }; + const durationType = duration === 1 ? 
singularType : pluralType; + return { duration, durationType }; }; diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.test.ts b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.test.ts index 683f8f1a5f84a..9e1c396723a27 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.test.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.test.ts @@ -12,6 +12,7 @@ import { createStore } from 'redux'; import { mockTreeWithNoAncestorsAnd2Children, mockTreeWith2AncestorsAndNoChildren, + mockTreeWith1AncestorAnd2ChildrenAndAllNodesHave2GraphableEvents, } from '../mocks/resolver_tree'; import { uniquePidForProcess } from '../../models/process_event'; import { EndpointEvent } from '../../../../common/endpoint/types'; @@ -353,4 +354,29 @@ describe('data state', () => { } }); }); + describe('with a tree with 1 ancestor and 2 children, where all nodes have 2 graphable events', () => { + const ancestorID = 'b'; + const originID = 'c'; + const firstChildID = 'd'; + const secondChildID = 'e'; + beforeEach(() => { + const tree = mockTreeWith1AncestorAnd2ChildrenAndAllNodesHave2GraphableEvents({ + ancestorID, + originID, + firstChildID, + secondChildID, + }); + actions.push({ + type: 'serverReturnedResolverData', + payload: { + result: tree, + // this value doesn't matter + databaseDocumentID: '', + }, + }); + }); + it('should have 4 graphable processes', () => { + expect(selectors.graphableProcesses(state()).length).toBe(4); + }); + }); }); diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts index 40138d3f2fd3c..1d65b406306a3 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts @@ -109,8 +109,16 @@ export const terminatedProcesses = createSelector(resolverTreeResponse, function * Process events that will be graphed. */ export const graphableProcesses = createSelector(resolverTreeResponse, function (tree?) { + // Keep track of the last process event (in array order) for each entity ID + const events: Map = new Map(); if (tree) { - return resolverTreeModel.lifecycleEvents(tree).filter(isGraphableProcess); + for (const event of resolverTreeModel.lifecycleEvents(tree)) { + if (isGraphableProcess(event)) { + const entityID = uniquePidForProcess(event); + events.set(entityID, event); + } + } + return [...events.values()]; } else { return []; } diff --git a/x-pack/plugins/security_solution/public/resolver/store/mocks/resolver_tree.ts b/x-pack/plugins/security_solution/public/resolver/store/mocks/resolver_tree.ts index 862cf47f73947..2860eec5a6ab6 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/mocks/resolver_tree.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/mocks/resolver_tree.ts @@ -85,3 +85,81 @@ export function mockTreeWithNoAncestorsAnd2Children({ lifecycle: [origin], } as unknown) as ResolverTree; } + +/** + * Creates a mock tree w/ 2 'graphable' events per node. This simulates the scenario where data has been duplicated in the response from the server. 
+ */ +export function mockTreeWith1AncestorAnd2ChildrenAndAllNodesHave2GraphableEvents({ + ancestorID, + originID, + firstChildID, + secondChildID, +}: { + ancestorID: string; + originID: string; + firstChildID: string; + secondChildID: string; +}): ResolverTree { + const ancestor: ResolverEvent = mockEndpointEvent({ + entityID: ancestorID, + name: ancestorID, + timestamp: 1, + parentEntityId: undefined, + }); + const ancestorClone: ResolverEvent = mockEndpointEvent({ + entityID: ancestorID, + name: ancestorID, + timestamp: 1, + parentEntityId: undefined, + }); + const origin: ResolverEvent = mockEndpointEvent({ + entityID: originID, + name: originID, + parentEntityId: ancestorID, + timestamp: 0, + }); + const originClone: ResolverEvent = mockEndpointEvent({ + entityID: originID, + name: originID, + parentEntityId: ancestorID, + timestamp: 0, + }); + const firstChild: ResolverEvent = mockEndpointEvent({ + entityID: firstChildID, + name: firstChildID, + parentEntityId: originID, + timestamp: 1, + }); + const firstChildClone: ResolverEvent = mockEndpointEvent({ + entityID: firstChildID, + name: firstChildID, + parentEntityId: originID, + timestamp: 1, + }); + const secondChild: ResolverEvent = mockEndpointEvent({ + entityID: secondChildID, + name: secondChildID, + parentEntityId: originID, + timestamp: 2, + }); + const secondChildClone: ResolverEvent = mockEndpointEvent({ + entityID: secondChildID, + name: secondChildID, + parentEntityId: originID, + timestamp: 2, + }); + + return ({ + entityID: originID, + children: { + childNodes: [ + { lifecycle: [firstChild, firstChildClone] }, + { lifecycle: [secondChild, secondChildClone] }, + ], + }, + ancestry: { + ancestors: [{ lifecycle: [ancestor, ancestorClone] }], + }, + lifecycle: [origin, originClone], + } as unknown) as ResolverTree; +} diff --git a/x-pack/plugins/security_solution/public/resolver/store/reducer.ts b/x-pack/plugins/security_solution/public/resolver/store/reducer.ts index 028c28d94a41b..d0f9701fe944e 100644 --- a/x-pack/plugins/security_solution/public/resolver/store/reducer.ts +++ b/x-pack/plugins/security_solution/public/resolver/store/reducer.ts @@ -18,7 +18,14 @@ const uiReducer: Reducer = ( }, action ) => { - if (action.type === 'userFocusedOnResolverNode') { + if (action.type === 'serverReturnedResolverData') { + const next: ResolverUIState = { + ...state, + ariaActiveDescendant: action.payload.result.entityID, + selectedNode: action.payload.result.entityID, + }; + return next; + } else if (action.type === 'userFocusedOnResolverNode') { const next: ResolverUIState = { ...state, ariaActiveDescendant: action.payload, diff --git a/x-pack/plugins/security_solution/public/resolver/types.ts b/x-pack/plugins/security_solution/public/resolver/types.ts index 856ae2d6240e3..02a890ca13ee8 100644 --- a/x-pack/plugins/security_solution/public/resolver/types.ts +++ b/x-pack/plugins/security_solution/public/resolver/types.ts @@ -310,7 +310,7 @@ export type DurationTypes = * duration value and description string */ export interface DurationDetails { - duration: number; + duration: number | '<1'; durationType: DurationTypes; } /** diff --git a/x-pack/plugins/security_solution/public/resolver/view/edge_line.tsx b/x-pack/plugins/security_solution/public/resolver/view/edge_line.tsx index 65c70f94432c7..9f310bb1cc0d6 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/edge_line.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/edge_line.tsx @@ -45,7 +45,7 @@ const StyledElapsedTime = styled.div` left: ${(props) => 
`${props.leftPct}%`}; padding: 6px 8px; border-radius: 999px; // generate pill shape - transform: translate(-50%, -50%) rotateX(35deg); + transform: translate(-50%, -50%); user-select: none; `; diff --git a/x-pack/plugins/security_solution/public/resolver/view/map.tsx b/x-pack/plugins/security_solution/public/resolver/view/map.tsx index 69ff9c8e2351b..30aa4b63a138d 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/map.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/map.tsx @@ -10,6 +10,7 @@ import React, { useContext } from 'react'; import { useSelector } from 'react-redux'; +import { useEffectOnce } from 'react-use'; import { EuiLoadingSpinner } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import * as selectors from '../store/selectors'; @@ -19,6 +20,7 @@ import { ProcessEventDot } from './process_event_dot'; import { useCamera } from './use_camera'; import { SymbolDefinitions, useResolverTheme } from './assets'; import { useStateSyncingActions } from './use_state_syncing_actions'; +import { useResolverQueryParams } from './use_resolver_query_params'; import { StyledMapContainer, StyledPanel, GraphContainer } from './styles'; import { entityId } from '../../../common/endpoint/models/event'; import { SideEffectContext } from './side_effect_context'; @@ -66,6 +68,10 @@ export const ResolverMap = React.memo(function ({ const hasError = useSelector(selectors.hasError); const activeDescendantId = useSelector(selectors.ariaActiveDescendant); const { colorMap } = useResolverTheme(); + const { cleanUpQueryParams } = useResolverQueryParams(); + useEffectOnce(() => { + return () => cleanUpQueryParams(); + }); return ( diff --git a/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx b/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx index 05f2e0cbfcfa9..aed292e4a39d1 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/process_event_dot.tsx @@ -313,6 +313,14 @@ const UnstyledProcessEventDot = React.memo( { + handleFocus(); + handleClick(); + } /* a11y note: this is strictly an alternate to the button, so no tabindex is necessary*/ + } + role="img" + aria-labelledby={labelHTMLID} style={{ display: 'block', width: '100%', @@ -320,6 +328,8 @@ const UnstyledProcessEventDot = React.memo( position: 'absolute', top: '0', left: '0', + outline: 'transparent', + border: 'none', }} > diff --git a/x-pack/plugins/security_solution/public/resolver/view/submenu.tsx b/x-pack/plugins/security_solution/public/resolver/view/submenu.tsx index 2499a451b9c8c..6a9ab184e9bab 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/submenu.tsx +++ b/x-pack/plugins/security_solution/public/resolver/view/submenu.tsx @@ -190,7 +190,7 @@ const NodeSubMenuComponents = React.memo( * then force the popover to reposition itself. 
*/ popoverRef.current && - !projectionMatrixAtLastRender.current && + projectionMatrixAtLastRender.current && projectionMatrixAtLastRender.current !== projectionMatrix ) { popoverRef.current.positionPopoverFixed(); diff --git a/x-pack/plugins/security_solution/public/resolver/view/use_resolver_query_params.ts b/x-pack/plugins/security_solution/public/resolver/view/use_resolver_query_params.ts index 3c342ae575aa0..84d954de6ef27 100644 --- a/x-pack/plugins/security_solution/public/resolver/view/use_resolver_query_params.ts +++ b/x-pack/plugins/security_solution/public/resolver/view/use_resolver_query_params.ts @@ -63,8 +63,19 @@ export function useResolverQueryParams() { }; }, [urlSearch, uniqueCrumbIdKey, uniqueCrumbEventKey]); + const cleanUpQueryParams = () => { + const crumbsToPass = { + ...querystring.parse(urlSearch.slice(1)), + }; + delete crumbsToPass[uniqueCrumbIdKey]; + delete crumbsToPass[uniqueCrumbEventKey]; + const relativeURL = { search: querystring.stringify(crumbsToPass) }; + history.replace(relativeURL); + }; + return { pushToQueryParams, queryParams, + cleanUpQueryParams, }; } diff --git a/x-pack/plugins/security_solution/public/shared_imports.ts b/x-pack/plugins/security_solution/public/shared_imports.ts index 9939345324f11..b2c7319b94576 100644 --- a/x-pack/plugins/security_solution/public/shared_imports.ts +++ b/x-pack/plugins/security_solution/public/shared_imports.ts @@ -32,6 +32,7 @@ export { useIsMounted, useCursor, useApi, + useAsync, useExceptionList, usePersistExceptionItem, usePersistExceptionList, @@ -50,4 +51,5 @@ export { Pagination, UseExceptionListSuccess, addEndpointExceptionList, + withOptionalSignal, } from '../../lists/public'; diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/index.ts b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/index.ts index fe7a8296608d2..084f892369b51 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/index.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/index.ts @@ -201,7 +201,11 @@ async function findAgent( hostMetadata.elastic.agent.id ); } catch (e) { - if (e.isBoom && e.output.statusCode === 404) { + if ( + metadataRequestContext.requestHandlerContext.core.savedObjects.client.errors.isNotFoundError( + e + ) + ) { metadataRequestContext.logger.warn( `agent with id ${hostMetadata.elastic.agent.id} not found` ); @@ -264,7 +268,11 @@ async function enrichHostMetadata( ); hostStatus = HOST_STATUS_MAPPING.get(status) || HostStatus.ERROR; } catch (e) { - if (e.isBoom && e.output.statusCode === 404) { + if ( + metadataRequestContext.requestHandlerContext.core.savedObjects.client.errors.isNotFoundError( + e + ) + ) { log.warn(`agent with id ${elasticAgentId} not found`); } else { log.error(e); diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts index 8d967656065d1..f3b832de9a786 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/metadata/metadata.test.ts @@ -12,6 +12,7 @@ import { RouteConfig, SavedObjectsClientContract, } from 'kibana/server'; +import { SavedObjectsErrorHelpers } from '../../../../../../../src/core/server/'; import { elasticsearchServiceMock, httpServerMock, @@ -31,7 +32,6 @@ import { createMockEndpointAppContextServiceStartContract, createRouteHandlerContext, } from '../../mocks'; 
-import Boom from 'boom';
 import { EndpointAppContextService } from '../../endpoint_app_context_services';
 import { createMockConfig } from '../../../lib/detection_engine/routes/__mocks__';
 import { EndpointDocGenerator } from '../../../../common/endpoint/generate_data';
@@ -306,11 +306,11 @@ describe('test endpoint route', () => {
      });
      mockAgentService.getAgentStatusById = jest.fn().mockImplementation(() => {
-       throw Boom.notFound('Agent not found');
+       throw SavedObjectsErrorHelpers.createGenericNotFoundError();
      });
      mockAgentService.getAgent = jest.fn().mockImplementation(() => {
-       throw Boom.notFound('Agent not found');
+       throw SavedObjectsErrorHelpers.createGenericNotFoundError();
      });
      mockScopedClient.callAsCurrentUser.mockImplementationOnce(() => Promise.resolve(response));
diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts
index 13ca51e1f2b39..b52c51ba789af 100644
--- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts
+++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts
@@ -112,7 +112,7 @@ export class ManifestManager {
        // Cache the compressed body of the artifact
        this.cache.set(artifactId, Buffer.from(artifact.body, 'base64'));
      } catch (err) {
-       if (err.status === 409) {
+       if (this.savedObjectsClient.errors.isConflictError(err)) {
          this.logger.debug(`Tried to create artifact ${artifactId}, but it already exists.`);
        } else {
          return err;
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
index 4437612a5056b..ee39661ee9b10 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/collection_cloudtrail_logging_created.json
@@ -14,6 +14,7 @@
   "language": "kuery",
   "license": "Elastic License",
   "name": "AWS CloudTrail Log Created",
+  "note": "The AWS Filebeat module must be enabled to use this rule.",
   "query": "event.action:CreateTrail and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success",
   "references": [
     "https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_CreateTrail.html",
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
index e3e4b7b54c3b2..eb8523b797ddf 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_attempted_bypass_of_okta_mfa.json
@@ -9,6 +9,7 @@
   "language": "kuery",
   "license": "Elastic License",
   "name": "Attempted Bypass of Okta MFA",
+  "note": "The Okta Filebeat module must be enabled to use this rule.",
   "query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.attempt_bypass",
   "references": [
"https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json new file mode 100644 index 0000000000000..ddc9e91782136 --- /dev/null +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_aws_iam_assume_role_brute_force.json @@ -0,0 +1,49 @@ +{ + "author": [ + "Elastic" + ], + "description": "Identifies a high number of failed attempts to assume an AWS Identity and Access Management (IAM) role. IAM roles are used to delegate access to users or services. An adversary may attempt to enumerate IAM roles in order to determine if a role exists before attempting to assume or hijack the discovered role.", + "from": "now-20m", + "index": [ + "filebeat-*" + ], + "language": "kuery", + "license": "Elastic License", + "name": "AWS IAM Brute Force of Assume Role Policy", + "note": "The AWS Filebeat module must be enabled to use this rule.", + "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.action:UpdateAssumeRolePolicy and aws.cloudtrail.error_code:MalformedPolicyDocumentException and event.outcome:failure", + "references": [ + "https://www.praetorian.com/blog/aws-iam-assume-role-vulnerabilities", + "https://rhinosecuritylabs.com/aws/assume-worst-aws-assume-role-enumeration/" + ], + "risk_score": 47, + "rule_id": "ea248a02-bc47-4043-8e94-2885b19b2636", + "severity": "medium", + "tags": [ + "AWS", + "Elastic" + ], + "threat": [ + { + "framework": "MITRE ATT&CK", + "tactic": { + "id": "TA0006", + "name": "Credential Access", + "reference": "https://attack.mitre.org/tactics/TA0006/" + }, + "technique": [ + { + "id": "T1110", + "name": "Brute Force", + "reference": "https://attack.mitre.org/techniques/T1110/" + } + ] + } + ], + "threshold": { + "field": "", + "value": 25 + }, + "type": "threshold", + "version": 1 +} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json index 1e268d2f6bf06..ecbf268550b6c 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_iam_user_addition_to_group.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM User Addition to Group", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:AddUserToGroup and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/IAM/latest/APIReference/API_AddUserToGroup.html" diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json new file mode 100644 index 0000000000000..87f20525203f6 --- /dev/null +++ 
b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_okta_brute_force_or_password_spraying.json @@ -0,0 +1,51 @@ +{ + "author": [ + "Elastic" + ], + "description": "Identifies a high number of failed Okta user authentication attempts from a single IP address, which could be indicative of a brute force or password spraying attack. An adversary may attempt a brute force or password spraying attack to obtain unauthorized access to user accounts.", + "false_positives": [ + "Automated processes that attempt to authenticate using expired credentials and unbounded retries may lead to false positives." + ], + "index": [ + "filebeat-*" + ], + "language": "kuery", + "license": "Elastic License", + "name": "Okta Brute Force or Password Spraying Attack", + "note": "The Okta Filebeat module must be enabled to use this rule.", + "query": "event.module:okta and event.dataset:okta.system and event.category:authentication and event.outcome:failure", + "references": [ + "https://developer.okta.com/docs/reference/api/system-log/", + "https://developer.okta.com/docs/reference/api/event-types/" + ], + "risk_score": 47, + "rule_id": "42bf698b-4738-445b-8231-c834ddefd8a0", + "severity": "medium", + "tags": [ + "Elastic", + "Okta" + ], + "threat": [ + { + "framework": "MITRE ATT&CK", + "tactic": { + "id": "TA0006", + "name": "Credential Access", + "reference": "https://attack.mitre.org/tactics/TA0006/" + }, + "technique": [ + { + "id": "T1110", + "name": "Brute Force", + "reference": "https://attack.mitre.org/techniques/T1110/" + } + ] + } + ], + "threshold": { + "field": "source.ip", + "value": 25 + }, + "type": "threshold", + "version": 1 +} diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json index 740805f71a3cd..f570b7fb3e946 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/credential_access_secretsmanager_getsecretvalue.json @@ -15,6 +15,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Access Secret in Secrets Manager", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.dataset:aws.cloudtrail and event.provider:secretsmanager.amazonaws.com and event.action:GetSecretValue", "references": [ "https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json index 2a74b8fecd809..78f4c9e853f64 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_deleted.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudTrail Log Deleted", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteTrail and 
event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_DeleteTrail.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json index 5d6c1a93bab1d..f412ad9b2e2fd 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudtrail_logging_suspended.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudTrail Log Suspended", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:StopLogging and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_StopLogging.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json index 9ac45ba872809..b76ea0944f855 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_cloudwatch_alarm_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudWatch Alarm Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteAlarms and event.dataset:aws.cloudtrail and event.provider:monitoring.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/cloudwatch/delete-alarms.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json index 9ef37bd4e44e1..353067e6db833 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_config_service_rule_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Config Service Tampering", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.dataset: aws.cloudtrail and event.action: DeleteConfigRule and event.provider: config.amazonaws.com", "references": [ "https://docs.aws.amazon.com/config/latest/developerguide/how-does-config-work.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json 
b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json index 0aed7aa5ad0ca..b70aa5cd11b52 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_configuration_recorder_stopped.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Configuration Recorder Stopped", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:StopConfigurationRecorder and event.dataset:aws.cloudtrail and event.provider:config.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/configservice/stop-configuration-recorder.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json index b1f6c42f6f61a..a1b0ec0f01d2a 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_flow_log_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS EC2 Flow Log Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteFlowLogs and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-flow-logs.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json index 7dc4e33afcd36..21ce4e498ccaf 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_ec2_network_acl_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS EC2 Network Access Control List Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(DeleteNetworkAcl or DeleteNetworkAclEntry) and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/delete-network-acl.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json index c456396c85cd8..989eff90aaf02 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json +++ 
b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_guardduty_detector_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS GuardDuty Detector Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteDetector and event.dataset:aws.cloudtrail and event.provider:guardduty.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/guardduty/delete-detector.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json index 77f9e0f4a313c..b1e8d0cd0d3e1 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_s3_bucket_configuration_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS S3 Bucket Configuration Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(DeleteBucketPolicy or DeleteBucketReplication or DeleteBucketCors or DeleteBucketEncryption or DeleteBucketLifecycle) and event.dataset:aws.cloudtrail and event.provider:s3.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketPolicy.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json index 708f931a5f8ab..b2092dc78b012 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_acl_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS WAF Access Control List Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteWebACL and event.dataset:aws.cloudtrail and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/waf-regional/delete-web-acl.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json index 37dae51ec3125..ccec76b7f7974 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/defense_evasion_waf_rule_or_rule_group_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS WAF Rule or Rule Group Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.module:aws and 
event.dataset:aws.cloudtrail and event.action:(DeleteRule or DeleteRuleGroup) and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/waf/delete-rule-group.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json index 396803086552e..e6a517d85db81 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/elastic_endpoint.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Generates a detection alert each time an Elastic Endpoint alert is received. Enabling this rule allows you to immediately begin investigating your Elastic Endpoint alerts.", + "description": "Generates a detection alert each time an Elastic Endpoint Security alert is received. Enabling this rule allows you to immediately begin investigating your Elastic Endpoint alerts.", "enabled": true, "exceptions_list": [ { @@ -18,7 +18,7 @@ "language": "kuery", "license": "Elastic License", "max_signals": 10000, - "name": "Elastic Endpoint", + "name": "Elastic Endpoint Security", "query": "event.kind:alert and event.module:(endpoint and not endgame)", "risk_score": 47, "risk_score_mapping": [ diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json index 5075630e24f29..16584a03a3c91 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_adversary_behavior_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected an Adversary Behavior. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected an Adversary Behavior. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Adversary Behavior - Detected - Elastic Endpoint", + "name": "Adversary Behavior - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and (event.action:rules_engine_event or endgame.event_subtype_full:rules_engine_event)", "risk_score": 47, "rule_id": "77a3c3df-8ec4-4da4-b758-878f551dee69", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json index 4bf9ba8ec36e1..5717c490114b9 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Credential Dumping. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Credential Dumping. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Credential Dumping - Detected - Elastic Endpoint", + "name": "Credential Dumping - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:cred_theft_event or endgame.event_subtype_full:cred_theft_event)", "risk_score": 73, "rule_id": "571afc56-5ed9-465d-a2a9-045f099f6e7e", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json index bed473b12b046..5c1b2cb02b841 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_dumping_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Credential Dumping. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Credential Dumping. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Credential Dumping - Prevented - Elastic Endpoint", + "name": "Credential Dumping - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:cred_theft_event or endgame.event_subtype_full:cred_theft_event)", "risk_score": 47, "rule_id": "db8c33a8-03cd-4988-9e2c-d0a4863adb13", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json index 02ba20bb59aec..16ad12a94ec40 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Credential Manipulation. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Credential Manipulation. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Credential Manipulation - Detected - Elastic Endpoint", + "name": "Credential Manipulation - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:token_manipulation_event or endgame.event_subtype_full:token_manipulation_event)", "risk_score": 73, "rule_id": "c0be5f31-e180-48ed-aa08-96b36899d48f", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json index 128f8d5639d5d..9addcbf2fba30 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_cred_manipulation_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Credential Manipulation. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Credential Manipulation. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Credential Manipulation - Prevented - Elastic Endpoint", + "name": "Credential Manipulation - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:token_manipulation_event or endgame.event_subtype_full:token_manipulation_event)", "risk_score": 47, "rule_id": "c9e38e64-3f4c-4bf3-ad48-0e61a60ea1fa", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json index a11b839792b79..f51a38781c953 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected an Exploit. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected an Exploit. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Exploit - Detected - Elastic Endpoint", + "name": "Exploit - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:exploit_event or endgame.event_subtype_full:exploit_event)", "risk_score": 73, "rule_id": "2003cdc8-8d83-4aa5-b132-1f9a8eb48514", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json index 2deb7bce3b203..8b96c5a63fbef 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_exploit_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented an Exploit. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented an Exploit. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Exploit - Prevented - Elastic Endpoint", + "name": "Exploit - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:exploit_event or endgame.event_subtype_full:exploit_event)", "risk_score": 47, "rule_id": "2863ffeb-bf77-44dd-b7a5-93ef94b72036", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json index d1389b21f2d7e..28ff73468deb4 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Malware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Malware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Malware - Detected - Elastic Endpoint", + "name": "Malware - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:file_classification_event or endgame.event_subtype_full:file_classification_event)", "risk_score": 99, "rule_id": "0a97b20f-4144-49ea-be32-b540ecc445de", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json index b83bc259175c6..3d32abf2bf8f2 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_malware_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Malware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Malware. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Malware - Prevented - Elastic Endpoint", + "name": "Malware - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:file_classification_event or endgame.event_subtype_full:file_classification_event)", "risk_score": 73, "rule_id": "3b382770-efbb-44f4-beed-f5e0a051b895", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json index b81b9c67644c6..a89a7f7d5918c 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Permission Theft. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Permission Theft. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Permission Theft - Detected - Elastic Endpoint", + "name": "Permission Theft - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:token_protection_event or endgame.event_subtype_full:token_protection_event)", "risk_score": 73, "rule_id": "c3167e1b-f73c-41be-b60b-87f4df707fe3", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json index b69598cffc230..fb9dbe3dadb17 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_permission_theft_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Permission Theft. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Permission Theft. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Permission Theft - Prevented - Elastic Endpoint", + "name": "Permission Theft - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:token_protection_event or endgame.event_subtype_full:token_protection_event)", "risk_score": 47, "rule_id": "453f659e-0429-40b1-bfdb-b6957286e04b", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json index 8299e11392398..e022d058d7560 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Process Injection. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Process Injection. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Process Injection - Detected - Elastic Endpoint", + "name": "Process Injection - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:kernel_shellcode_event or endgame.event_subtype_full:kernel_shellcode_event)", "risk_score": 73, "rule_id": "80c52164-c82a-402c-9964-852533d58be1", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json index 237558ae372a8..2d189707293f1 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_process_injection_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Process Injection. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Process Injection. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Process Injection - Prevented - Elastic Endpoint", + "name": "Process Injection - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:kernel_shellcode_event or endgame.event_subtype_full:kernel_shellcode_event)", "risk_score": 47, "rule_id": "990838aa-a953-4f3e-b3cb-6ddf7584de9e", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json index 4ead850c60e8f..077c20bca5d8e 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_detected.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint detected Ransomware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security detected Ransomware. Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Ransomware - Detected - Elastic Endpoint", + "name": "Ransomware - Detected - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:detection and (event.action:ransomware_event or endgame.event_subtype_full:ransomware_event)", "risk_score": 99, "rule_id": "8cb4f625-7743-4dfb-ae1b-ad92be9df7bd", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json index 25d167afa204c..b615fcb04895e 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/endpoint_ransomware_prevented.json @@ -2,7 +2,7 @@ "author": [ "Elastic" ], - "description": "Elastic Endpoint prevented Ransomware. Click the Elastic Endpoint icon in the event.module column or the link in the rule.reference column in the External Alerts tab of the SIEM Detections page for additional information.", + "description": "Elastic Endpoint Security prevented Ransomware. 
Click the Elastic Endpoint Security icon in the event.module column or the link in the rule.reference column for additional information.", "from": "now-15m", "index": [ "endgame-*" @@ -10,7 +10,7 @@ "interval": "10m", "language": "kuery", "license": "Elastic License", - "name": "Ransomware - Prevented - Elastic Endpoint", + "name": "Ransomware - Prevented - Elastic Endpoint Security", "query": "event.kind:alert and event.module:endgame and endgame.metadata.type:prevention and (event.action:ransomware_event or endgame.event_subtype_full:ransomware_event)", "risk_score": 73, "rule_id": "e3c5d5cb-41d5-4206-805c-f30561eae3ac", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json index 90338f4460725..a9f8ee1af8bf6 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/execution_via_system_manager.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Execution via System Manager", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:ssm.amazonaws.com and event.action:SendCommand and event.outcome:success", "references": [ "https://docs.aws.amazon.com/systems-manager/latest/userguide/ssm-plugins.html" diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json index 04cc697cf36f9..25711afbb4c66 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/exfiltration_ec2_snapshot_change_activity.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS EC2 Snapshot Activity", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.action:ModifySnapshotAttribute", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/modify-snapshot-attribute.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json index c8ebb2ed0e5d7..678ad9eb03b50 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/external_alerts.json @@ -2,7 +2,16 @@ "author": [ "Elastic" ], - "description": "Generates a detection alert for each external alert written to the configured securitySolution:defaultIndex. Enabling this rule allows you to immediately begin investigating external alerts in the app.", + "description": "Generates a detection alert for each external alert written to the configured indices. 
Enabling this rule allows you to immediately begin investigating external alerts in the app.", + "index": [ + "apm-*-transaction*", + "auditbeat-*", + "endgame-*", + "filebeat-*", + "logs-*", + "packetbeat-*", + "winlogbeat-*" + ], "language": "kuery", "license": "Elastic License", "max_signals": 10000, diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json index 0f4ded9fcfe87..27e50313c8f82 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_attempt_to_revoke_okta_api_token.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Revoke Okta API Token", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:system.api_token.revoke", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json index d969ef21027f0..0bafa56c9af49 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudtrail_logging_updated.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudTrail Log Updated", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:UpdateTrail and event.dataset:aws.cloudtrail and event.provider:cloudtrail.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_UpdateTrail.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json index d33593d4a44b2..74b5e0d93c441 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_group_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudWatch Log Group Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteLogGroup and event.dataset:aws.cloudtrail and event.provider:logs.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/delete-log-group.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json index 
a1108dd07abdd..59c659117c098 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_cloudwatch_log_stream_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS CloudWatch Log Stream Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteLogStream and event.dataset:aws.cloudtrail and event.provider:logs.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/delete-log-stream.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json index 4681b475d92e7..10a1989ad6423 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_ec2_disable_ebs_encryption.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS EC2 Encryption Disabled", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DisableEbsEncryptionByDefault and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success", "references": [ "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json index f873e3483a34f..4aa0b355171fe 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_deactivate_mfa_device.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM Deactivation of MFA Device", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeactivateMFADevice and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/deactivate-mfa-device.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json index 23364c8b3aa28..25b300d33cce1 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_iam_group_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM Group Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:DeleteGroup and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and 
event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/delete-group.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json index 8c76f182442a5..9ca8b7ed21acb 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_possible_okta_dos_attack.json @@ -9,6 +9,7 @@ "language": "kuery", "license": "Elastic License", "name": "Possible Okta DoS Attack", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:(application.integration.rate_limit_exceeded or system.org.rate_limit.warning or system.org.rate_limit.violation or core.concurrency.org.limit.violation)", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json index 88ec942b0e5e5..e8343f1b7b7c6 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_cluster_deletion.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS RDS Cluster Deletion", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(DeleteDBCluster or DeleteGlobalCluster) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/delete-db-cluster.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json index 2c25781e24d19..8c4387e60d281 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/impact_rds_instance_cluster_stoppage.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS RDS Instance/Cluster Stoppage", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(StopDBCluster or StopDBInstance) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/stop-db-cluster.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts index f2e2137eec41b..685c869630ca3 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts +++ 
b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/index.ts @@ -210,6 +210,8 @@ import rule198 from './ml_cloudtrail_rare_error_code.json'; import rule199 from './ml_cloudtrail_rare_method_by_city.json'; import rule200 from './ml_cloudtrail_rare_method_by_country.json'; import rule201 from './ml_cloudtrail_rare_method_by_user.json'; +import rule202 from './credential_access_aws_iam_assume_role_brute_force.json'; +import rule203 from './credential_access_okta_brute_force_or_password_spraying.json'; export const rawRules = [ rule1, @@ -413,4 +415,6 @@ export const rawRules = [ rule199, rule200, rule201, + rule202, + rule203, ]; diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json index 0f761f0d2a5f5..829d87c1964c9 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_console_login_root.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Management Console Root Login", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:ConsoleLogin and event.module:aws and event.dataset:aws.cloudtrail and event.provider:signin.amazonaws.com and aws.cloudtrail.user_identity.type:Root and event.outcome:success", "references": [ "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user.html" diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json index 1042ce19a14c7..7429c69fc3174 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_password_recovery.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM Password Recovery Requested", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:PasswordRecoveryRequested and event.provider:signin.amazonaws.com and event.outcome:success", "references": [ "https://www.cadosecurity.com/2020/06/11/an-ongoing-aws-phishing-campaign/" diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json index 5fa8a655c08bf..25bf7dd287d05 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/initial_access_suspicious_activity_reported_by_okta_user.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Suspicious Activity Reported by Okta User", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and 
event.dataset:okta.system and event.action:user.account.report_suspicious_activity_by_enduser", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json index 737044d5a9bdc..1d15db83bb18e 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_deactivate_okta_mfa_rule.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Deactivate Okta MFA Rule", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:policy.rule.deactivate", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json index ea8ba7223095f..6df2ed6cb34a4 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_delete_okta_policy.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Delete Okta Policy", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.delete", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json index dfe16f56da0e2..e276166f6130b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_mfa_rule.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Modify Okta MFA Rule", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:(policy.rule.update or policy.rule.delete)", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json index 61c45f8e7d85e..bdfe7d25092ba 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json +++ 
b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_network_zone.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Modify Okta Network Zone", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:(zone.update or zone.deactivate or zone.delete or network_zone.rule.disabled or zone.remove_blacklist)", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json index a864b900a5998..e3e0d5fef7b2f 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_okta_policy.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Modify Okta Policy", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.update", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json index ff7546ac2f1a6..ad21ebe065f8c 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_attempt_to_modify_or_delete_application_sign_on_policy.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Modification or Removal of an Okta Application Sign-On Policy", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:(application.policy.sign_on.update or application.policy.sign_on.rule.delete)", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json index 7a1b6e3d82d7c..e92cf3d67d313 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/okta_threat_detected_by_okta_threatinsight.json @@ -9,6 +9,7 @@ "language": "kuery", "license": "Elastic License", "name": "Threat Detected by Okta ThreatInsight", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:security.threat.detected", "references": [ 
"https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json index 70e7eb1706e1b..d5f3995fb8bcc 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_administrator_privileges_assigned_to_okta_group.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Administrator Privileges Assigned to Okta Group", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:group.privilege.grant", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json index 453580d580344..5f6c006c5d177 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_create_okta_api_token.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Create Okta API Token", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:system.api_token.create", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json index e5648285c5289..d3a66ef8d9c77 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_mfa_for_okta_user_account.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Deactivate MFA for Okta User Account", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.factor.deactivate", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json index 53da259042738..7104cace1c5d9 100644 --- 
a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_deactivate_okta_policy.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Deactivate Okta Policy", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:policy.lifecycle.deactivate", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json index f662c0c0b8eb6..c38f71d8e00a6 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_attempt_to_reset_mfa_factors_for_okta_user_account.json @@ -12,6 +12,7 @@ "language": "kuery", "license": "Elastic License", "name": "Attempt to Reset MFA Factors for Okta User Account", + "note": "The Okta Filebeat module must be enabled to use this rule.", "query": "event.module:okta and event.dataset:okta.system and event.action:user.mfa.factor.reset_all", "references": [ "https://developer.okta.com/docs/reference/api/system-log/", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json index 911536d2567f4..99bb07fe9660e 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_ec2_network_acl_creation.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS EC2 Network Access Control List Creation", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(CreateNetworkAcl or CreateNetworkAclEntry) and event.dataset:aws.cloudtrail and event.provider:ec2.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/create-network-acl.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json index 7c1c4d02737a6..9b2478b97fb38 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_iam_group_creation.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM Group Creation", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:CreateGroup and 
event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/create-group.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json index c6e23acab0fb5..94a695a97a27a 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/persistence_rds_cluster_creation.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS RDS Cluster Creation", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.action:(CreateDBCluster or CreateGlobalCluster) and event.dataset:aws.cloudtrail and event.provider:rds.amazonaws.com and event.outcome:success", "references": [ "https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/create-db-cluster.html", diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json index 6db9e04edc0cb..74c5376100b2b 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_root_login_without_mfa.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS Root Login Without MFA", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:signin.amazonaws.com and event.action:ConsoleLogin and aws.cloudtrail.user_identity.type:Root and aws.cloudtrail.console_login.additional_eventdata.mfa_used:false and event.outcome:success", "references": [ "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user.html" diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json index 623f90716b2b6..7ce54b00f211c 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rules/prepackaged_rules/privilege_escalation_updateassumerolepolicy.json @@ -14,6 +14,7 @@ "language": "kuery", "license": "Elastic License", "name": "AWS IAM Assume Role Policy Update", + "note": "The AWS Filebeat module must be enabled to use this rule.", "query": "event.module:aws and event.dataset:aws.cloudtrail and event.provider:iam.amazonaws.com and event.action:UpdateAssumeRolePolicy and event.outcome:success", "references": [ "https://labs.bishopfox.com/tech-blog/5-privesc-attack-vectors-in-aws" diff --git a/x-pack/plugins/task_manager/server/lib/bulk_operation_buffer.test.ts b/x-pack/plugins/task_manager/server/lib/bulk_operation_buffer.test.ts index 
3a21f622cec17..f32a755515a95 100644 --- a/x-pack/plugins/task_manager/server/lib/bulk_operation_buffer.test.ts +++ b/x-pack/plugins/task_manager/server/lib/bulk_operation_buffer.test.ts @@ -33,8 +33,7 @@ function errorAttempts(task: TaskInstance): Err { +describe('Bulk Operation Buffer', () => { describe('createBuffer()', () => { test('batches up multiple Operation calls', async () => { const bulkUpdate: jest.Mocked> = jest.fn( @@ -67,8 +66,6 @@ describe.skip('Bulk Operation Buffer', () => { const task2 = createTask(); const task3 = createTask(); const task4 = createTask(); - const task5 = createTask(); - const task6 = createTask(); return new Promise((resolve) => { Promise.all([bufferedUpdate(task1), bufferedUpdate(task2)]).then((_) => { @@ -79,22 +76,18 @@ describe.skip('Bulk Operation Buffer', () => { setTimeout(() => { // on next tick - setTimeout(() => { - // on next tick - expect(bulkUpdate).toHaveBeenCalledTimes(2); - Promise.all([bufferedUpdate(task5), bufferedUpdate(task6)]).then((_) => { - expect(bulkUpdate).toHaveBeenCalledTimes(3); - expect(bulkUpdate).toHaveBeenCalledWith([task5, task6]); - resolve(); - }); - }, bufferMaxDuration + 1); - expect(bulkUpdate).toHaveBeenCalledTimes(1); Promise.all([bufferedUpdate(task3), bufferedUpdate(task4)]).then((_) => { expect(bulkUpdate).toHaveBeenCalledTimes(2); expect(bulkUpdate).toHaveBeenCalledWith([task3, task4]); }); - }, bufferMaxDuration + 1); + + setTimeout(() => { + // on next tick + expect(bulkUpdate).toHaveBeenCalledTimes(2); + resolve(); + }, bufferMaxDuration * 1.1); + }, bufferMaxDuration * 1.1); }); }); @@ -103,8 +96,9 @@ describe.skip('Bulk Operation Buffer', () => { return Promise.resolve(tasks.map(incrementAttempts)); }); + const bufferMaxDuration = 1000; const bufferedUpdate = createBuffer(bulkUpdate, { - bufferMaxDuration: 100, + bufferMaxDuration, bufferMaxOperations: 2, }); @@ -114,26 +108,19 @@ describe.skip('Bulk Operation Buffer', () => { const task4 = createTask(); const task5 = createTask(); - return new Promise((resolve) => { - bufferedUpdate(task1); - bufferedUpdate(task2); - bufferedUpdate(task3); - bufferedUpdate(task4); - - setTimeout(() => { - expect(bulkUpdate).toHaveBeenCalledTimes(2); - expect(bulkUpdate).toHaveBeenCalledWith([task1, task2]); - expect(bulkUpdate).toHaveBeenCalledWith([task3, task4]); - - setTimeout(() => { - expect(bulkUpdate).toHaveBeenCalledTimes(2); - bufferedUpdate(task5).then((_) => { - expect(bulkUpdate).toHaveBeenCalledTimes(3); - expect(bulkUpdate).toHaveBeenCalledWith([task5]); - resolve(); - }); - }, 50); - }, 50); + return Promise.all([ + bufferedUpdate(task1), + bufferedUpdate(task2), + bufferedUpdate(task3), + bufferedUpdate(task4), + ]).then(() => { + expect(bulkUpdate).toHaveBeenCalledTimes(2); + expect(bulkUpdate).toHaveBeenCalledWith([task1, task2]); + expect(bulkUpdate).toHaveBeenCalledWith([task3, task4]); + return bufferedUpdate(task5).then((_) => { + expect(bulkUpdate).toHaveBeenCalledTimes(3); + expect(bulkUpdate).toHaveBeenCalledWith([task5]); + }); }); }); @@ -153,29 +140,26 @@ describe.skip('Bulk Operation Buffer', () => { const task3 = createTask(); const task4 = createTask(); - return new Promise((resolve) => { - bufferedUpdate(task1); - bufferedUpdate(task2); - - setTimeout(() => { - expect(bulkUpdate).toHaveBeenCalledTimes(1); - expect(bulkUpdate).toHaveBeenCalledWith([task1, task2]); + return Promise.all([bufferedUpdate(task1), bufferedUpdate(task2)]).then(() => { + expect(bulkUpdate).toHaveBeenCalledTimes(1); + expect(bulkUpdate).toHaveBeenCalledWith([task1, 
task2]); - bufferedUpdate(task3); - bufferedUpdate(task4); + return new Promise((resolve) => { + const futureUpdates = Promise.all([bufferedUpdate(task3), bufferedUpdate(task4)]); setTimeout(() => { expect(bulkUpdate).toHaveBeenCalledTimes(1); - setTimeout(() => { + futureUpdates.then(() => { expect(bulkUpdate).toHaveBeenCalledTimes(2); expect(bulkUpdate).toHaveBeenCalledWith([task3, task4]); resolve(); - }, bufferMaxDuration / 2); + }); }, bufferMaxDuration / 2); - }, bufferMaxDuration + 1); + }); }); }); + test('handles both resolutions and rejections at individual task level', async (done) => { const bulkUpdate: jest.Mocked> = jest.fn( ([task1, task2, task3]) => { diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json index 846330146cf07..cf789d1e7c450 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ b/x-pack/plugins/translations/translations/ja-JP.json @@ -10239,7 +10239,6 @@ "xpack.ml.newJob.recognize.advancedLabel": "高度な設定", "xpack.ml.newJob.recognize.advancedSettingsAriaLabel": "高度な設定", "xpack.ml.newJob.recognize.alreadyExistsLabel": "(既に存在します)", - "xpack.ml.newJob.recognize.analysisRunningLabel": "分析を実行中", "xpack.ml.newJob.recognize.cancelJobOverrideLabel": "閉じる", "xpack.ml.newJob.recognize.createJobButtonAriaLabel": "ジョブを作成", "xpack.ml.newJob.recognize.createJobButtonLabel": "{numberOfJobs, plural, zero {Job} one {Job} other {Jobs}} を作成", diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json index 477858d2e74d1..5b81804faf715 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -10244,7 +10244,6 @@ "xpack.ml.newJob.recognize.advancedLabel": "高级", "xpack.ml.newJob.recognize.advancedSettingsAriaLabel": "高级设置", "xpack.ml.newJob.recognize.alreadyExistsLabel": "(已存在)", - "xpack.ml.newJob.recognize.analysisRunningLabel": "分析正在运行", "xpack.ml.newJob.recognize.cancelJobOverrideLabel": "关闭", "xpack.ml.newJob.recognize.createJobButtonAriaLabel": "创建作业", "xpack.ml.newJob.recognize.createJobButtonLabel": "创建{numberOfJobs, plural, zero {作业} one {Job} other {Jobs}}", diff --git a/x-pack/plugins/triggers_actions_ui/README.md b/x-pack/plugins/triggers_actions_ui/README.md index 0dd2d100401f0..b8e765c9ea635 100644 --- a/x-pack/plugins/triggers_actions_ui/README.md +++ b/x-pack/plugins/triggers_actions_ui/README.md @@ -1294,7 +1294,7 @@ Then this dependencies will be used to embed Actions form or register your own a return ( { initialAlert.actions[index].id = id; @@ -1329,7 +1329,7 @@ interface ActionAccordionFormProps { 'get$' | 'add' | 'remove' | 'addSuccess' | 'addWarning' | 'addDanger' | 'addError' >; actionTypes?: ActionType[]; - messageVariables?: string[]; + messageVariables?: ActionVariable[]; defaultActionMessage?: string; consumer: string; } diff --git a/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.scss b/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.scss index 996f21c4b6b09..521d0f399b19b 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.scss +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.scss @@ -1,4 +1,5 @@ .messageVariablesPanel { @include euiYScrollWithShadows; max-height: $euiSize * 20; + max-width: $euiSize * 20; } \ No newline at end of file diff --git 
a/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.tsx b/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.tsx index 655f64995d147..0742ed8a778ef 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/add_message_variables.tsx @@ -5,11 +5,18 @@ */ import React, { useState } from 'react'; import { i18n } from '@kbn/i18n'; -import { EuiPopover, EuiButtonIcon, EuiContextMenuPanel, EuiContextMenuItem } from '@elastic/eui'; +import { + EuiPopover, + EuiButtonIcon, + EuiContextMenuPanel, + EuiContextMenuItem, + EuiText, +} from '@elastic/eui'; import './add_message_variables.scss'; +import { ActionVariable } from '../../types'; interface Props { - messageVariables: string[] | undefined; + messageVariables?: ActionVariable[]; paramsProperty: string; onSelectEventHandler: (variable: string) => void; } @@ -22,17 +29,22 @@ export const AddMessageVariables: React.FunctionComponent = ({ const [isVariablesPopoverOpen, setIsVariablesPopoverOpen] = useState(false); const getMessageVariables = () => - messageVariables?.map((variable: string, i: number) => ( + messageVariables?.map((variable: ActionVariable, i: number) => ( { - onSelectEventHandler(variable); + onSelectEventHandler(variable.name); setIsVariablesPopoverOpen(false); }} > - {`{{${variable}}}`} + <> + {`{{${variable.name}}}`} + +
{variable.description}
+ + )); diff --git a/x-pack/plugins/triggers_actions_ui/public/application/components/builtin_action_types/servicenow/servicenow_params.tsx b/x-pack/plugins/triggers_actions_ui/public/application/components/builtin_action_types/servicenow/servicenow_params.tsx index 1e0f4d1fdc57c..2a29018d83ff4 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/builtin_action_types/servicenow/servicenow_params.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/builtin_action_types/servicenow/servicenow_params.tsx @@ -61,7 +61,7 @@ const ServiceNowParamsFields: React.FunctionComponent variable === 'alertId')) { + if (!savedObjectId && messageVariables?.find((variable) => variable.name === 'alertId')) { editSubActionProperty('savedObjectId', '{{alertId}}'); } if (!urgency) { diff --git a/x-pack/plugins/triggers_actions_ui/public/application/components/json_editor_with_message_variables.tsx b/x-pack/plugins/triggers_actions_ui/public/application/components/json_editor_with_message_variables.tsx index 473c0fe9609ce..0b8184fc441fd 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/json_editor_with_message_variables.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/json_editor_with_message_variables.tsx @@ -9,9 +9,10 @@ import './add_message_variables.scss'; import { useXJsonMode } from '../../../../../../src/plugins/es_ui_shared/static/ace_x_json/hooks'; import { AddMessageVariables } from './add_message_variables'; +import { ActionVariable } from '../../types'; interface Props { - messageVariables: string[] | undefined; + messageVariables?: ActionVariable[]; paramsProperty: string; inputTargetValue: string; label: string; diff --git a/x-pack/plugins/triggers_actions_ui/public/application/components/text_area_with_message_variables.tsx b/x-pack/plugins/triggers_actions_ui/public/application/components/text_area_with_message_variables.tsx index 0b8a9349ad5fb..e60785f70bffe 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/text_area_with_message_variables.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/text_area_with_message_variables.tsx @@ -7,9 +7,10 @@ import React, { useState } from 'react'; import { EuiTextArea, EuiFormRow } from '@elastic/eui'; import './add_message_variables.scss'; import { AddMessageVariables } from './add_message_variables'; +import { ActionVariable } from '../../types'; interface Props { - messageVariables: string[] | undefined; + messageVariables?: ActionVariable[]; paramsProperty: string; index: number; inputTargetValue?: string; diff --git a/x-pack/plugins/triggers_actions_ui/public/application/components/text_field_with_message_variables.tsx b/x-pack/plugins/triggers_actions_ui/public/application/components/text_field_with_message_variables.tsx index e280fd3f34e99..fc05b237ccf5e 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/components/text_field_with_message_variables.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/components/text_field_with_message_variables.tsx @@ -7,9 +7,10 @@ import React, { useState } from 'react'; import { EuiFieldText } from '@elastic/eui'; import './add_message_variables.scss'; import { AddMessageVariables } from './add_message_variables'; +import { ActionVariable } from '../../types'; interface Props { - messageVariables: string[] | undefined; + messageVariables?: ActionVariable[]; paramsProperty: string; index: number; inputTargetValue?: string; diff --git 
a/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.test.ts b/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.test.ts index ddd03df8bee6b..c5009fad32942 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.test.ts +++ b/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.test.ts @@ -12,7 +12,7 @@ beforeEach(() => jest.resetAllMocks()); describe('actionVariablesFromAlertType', () => { test('should return correct variables when no state or context provided', async () => { - const alertType = getAlertType({ context: [], state: [] }); + const alertType = getAlertType({ context: [], state: [], params: [] }); expect(actionVariablesFromAlertType(alertType)).toMatchInlineSnapshot(` Array [ Object { @@ -46,6 +46,7 @@ describe('actionVariablesFromAlertType', () => { { name: 'bar', description: 'bar-description' }, ], state: [], + params: [], }); expect(actionVariablesFromAlertType(alertType)).toMatchInlineSnapshot(` Array [ @@ -88,6 +89,7 @@ describe('actionVariablesFromAlertType', () => { { name: 'foo', description: 'foo-description' }, { name: 'bar', description: 'bar-description' }, ], + params: [], }); expect(actionVariablesFromAlertType(alertType)).toMatchInlineSnapshot(` Array [ @@ -133,6 +135,7 @@ describe('actionVariablesFromAlertType', () => { { name: 'fooS', description: 'fooS-description' }, { name: 'barS', description: 'barS-description' }, ], + params: [{ name: 'fooP', description: 'fooP-description' }], }); expect(actionVariablesFromAlertType(alertType)).toMatchInlineSnapshot(` Array [ @@ -164,6 +167,10 @@ describe('actionVariablesFromAlertType', () => { "description": "barC-description", "name": "context.barC", }, + Object { + "description": "fooP-description", + "name": "params.fooP", + }, Object { "description": "fooS-description", "name": "state.fooS", diff --git a/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.ts b/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.ts index 714dc5210e390..8bbe34847016d 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.ts +++ b/x-pack/plugins/triggers_actions_ui/public/application/lib/action_variables.ts @@ -11,9 +11,10 @@ import { AlertType, ActionVariable } from '../../types'; export function actionVariablesFromAlertType(alertType: AlertType): ActionVariable[] { const alwaysProvidedVars = getAlwaysProvidedActionVariables(); const contextVars = prefixKeys(alertType.actionVariables.context, 'context.'); + const paramsVars = prefixKeys(alertType.actionVariables.params, 'params.'); const stateVars = prefixKeys(alertType.actionVariables.state, 'state.'); - return alwaysProvidedVars.concat(contextVars, stateVars); + return alwaysProvidedVars.concat(contextVars, paramsVars, stateVars); } function prefixKeys(actionVariables: ActionVariable[], prefix: string): ActionVariable[] { diff --git a/x-pack/plugins/triggers_actions_ui/public/application/lib/alert_api.test.ts b/x-pack/plugins/triggers_actions_ui/public/application/lib/alert_api.test.ts index 23caf2cfb31a8..fc5d301cb7cd0 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/lib/alert_api.test.ts +++ b/x-pack/plugins/triggers_actions_ui/public/application/lib/alert_api.test.ts @@ -42,6 +42,7 @@ describe('loadAlertTypes', () => { actionVariables: { context: [{ name: 'var1', description: 'val1' }], state: [{ name: 'var2', description: 'val2' }], + params: [{ name: 'var3', description: 'val3' 
}], }, producer: ALERTS_FEATURE_ID, actionGroups: [{ id: 'default', name: 'Default' }], diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.test.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.test.tsx index c21cce4cc4b62..7ee1e0d3f3fa6 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.test.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.test.tsx @@ -217,7 +217,10 @@ describe('action_form', () => { wrapper = mountWithIntl( { initialAlert.actions[index].id = id; diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.tsx index af10f583dd413..2d4507ca93078 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/action_form.tsx @@ -38,6 +38,7 @@ import { ActionTypeIndex, ActionConnector, ActionType, + ActionVariable, } from '../../../types'; import { SectionLoading } from '../../components/section_loading'; import { ConnectorAddModal } from './connector_add_modal'; @@ -61,7 +62,7 @@ interface ActionAccordionFormProps { >; docLinks: DocLinksStart; actionTypes?: ActionType[]; - messageVariables?: string[]; + messageVariables?: ActionVariable[]; defaultActionMessage?: string; setHasActionsDisabled?: (value: boolean) => void; capabilities: ApplicationStart['capabilities']; diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_details/components/alert_details.test.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_details/components/alert_details.test.tsx index ccaa180de0edc..a620a0db45408 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_details/components/alert_details.test.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_details/components/alert_details.test.tsx @@ -93,7 +93,7 @@ describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -132,7 +132,7 @@ describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -162,7 +162,7 @@ describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -216,7 +216,7 @@ describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -275,7 +275,7 @@ 
describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -295,7 +295,7 @@ describe('alert_details', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -324,7 +324,7 @@ describe('disable button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -352,7 +352,7 @@ describe('disable button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -380,7 +380,7 @@ describe('disable button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -417,7 +417,7 @@ describe('disable button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -457,7 +457,7 @@ describe('mute button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -486,7 +486,7 @@ describe('mute button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -515,7 +515,7 @@ describe('mute button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -553,7 +553,7 @@ describe('mute button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -591,7 +591,7 @@ describe('mute button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: ALERTS_FEATURE_ID, authorizedConsumers, @@ -641,7 +641,7 @@ 
describe('edit button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: 'alerting', authorizedConsumers, @@ -683,7 +683,7 @@ describe('edit button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: 'alerting', authorizedConsumers, @@ -718,7 +718,7 @@ describe('edit button', () => { id: '.noop', name: 'No Op', actionGroups: [{ id: 'default', name: 'Default' }], - actionVariables: { context: [], state: [] }, + actionVariables: { context: [], state: [], params: [] }, defaultActionGroupId: 'default', producer: 'alerting', authorizedConsumers, diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_add.test.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_add.test.tsx index 10efabd70aded..3803fcebbb92d 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_add.test.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_add.test.tsx @@ -68,6 +68,7 @@ describe('alert_add', () => { actionVariables: { context: [], state: [], + params: [], }, }, ]; diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_form.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_form.tsx index 47ec2c436ca50..9d54baf359af5 100644 --- a/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_form.tsx +++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/alert_form/alert_form.tsx @@ -269,8 +269,8 @@ export const AlertForm = ({ setHasActionsDisabled={setHasActionsDisabled} messageVariables={ alertTypesIndex && alertTypesIndex.has(alert.alertTypeId) - ? actionVariablesFromAlertType(alertTypesIndex.get(alert.alertTypeId)!).map( - (av) => av.name + ? 
actionVariablesFromAlertType(alertTypesIndex.get(alert.alertTypeId)!).sort((a, b) => + a.name.toUpperCase().localeCompare(b.name.toUpperCase()) ) : undefined } diff --git a/x-pack/plugins/triggers_actions_ui/public/index.ts b/x-pack/plugins/triggers_actions_ui/public/index.ts index 55653f49001b9..1048e15eb1184 100644 --- a/x-pack/plugins/triggers_actions_ui/public/index.ts +++ b/x-pack/plugins/triggers_actions_ui/public/index.ts @@ -19,6 +19,7 @@ export { ActionType, ActionTypeRegistryContract, AlertTypeParamsExpressionProps, + ActionVariable, } from './types'; export { ConnectorAddFlyout, diff --git a/x-pack/plugins/triggers_actions_ui/public/types.ts b/x-pack/plugins/triggers_actions_ui/public/types.ts index dd2b070956dbc..a42a9f56a751f 100644 --- a/x-pack/plugins/triggers_actions_ui/public/types.ts +++ b/x-pack/plugins/triggers_actions_ui/public/types.ts @@ -41,7 +41,7 @@ export interface ActionParamsProps { index: number; editAction: (property: string, value: any, index: number) => void; errors: IErrorObject; - messageVariables?: string[]; + messageVariables?: ActionVariable[]; defaultMessage?: string; docLinks: DocLinksStart; } @@ -94,6 +94,7 @@ export interface ActionVariable { export interface ActionVariables { context: ActionVariable[]; state: ActionVariable[]; + params: ActionVariable[]; } export interface AlertType { diff --git a/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.test.ts b/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.test.ts index 713d0cb85e2e8..525c3781be749 100644 --- a/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.test.ts +++ b/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.test.ts @@ -3,8 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ - -import Boom from 'boom'; +import { SavedObjectsErrorHelpers } from '../../../../../../src/core/server'; import moment from 'moment'; import { @@ -27,7 +26,7 @@ describe('ReindexActions', () => { beforeEach(() => { client = { - errors: null, + errors: SavedObjectsErrorHelpers, create: jest.fn(unimplemented('create')), bulkCreate: jest.fn(unimplemented('bulkCreate')), delete: jest.fn(unimplemented('delete')), @@ -306,7 +305,7 @@ describe('ReindexActions', () => { describe(`IndexConsumerType.${typeKey}`, () => { it('creates the lock doc if it does not exist and executes callback', async () => { expect.assertions(3); - client.get.mockRejectedValueOnce(Boom.notFound()); // mock no ML doc exists yet + client.get.mockRejectedValueOnce(SavedObjectsErrorHelpers.createGenericNotFoundError()); // mock no ML doc exists yet client.create.mockImplementationOnce((type: any, attributes: any, { id }: any) => Promise.resolve({ type, diff --git a/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.ts b/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.ts index 54f9fe9d298f2..6d8afee1ff950 100644 --- a/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.ts +++ b/x-pack/plugins/upgrade_assistant/server/lib/reindexing/reindex_actions.ts @@ -253,7 +253,7 @@ export const reindexActionsFactory = ( // The IndexGroup enum value (a string) serves as the ID of the lock doc return await client.get(REINDEX_OP_TYPE, indexGroup); } catch (e) { - if (e.isBoom && e.output.statusCode === 404) { + if (client.errors.isNotFoundError(e)) { return await client.create( REINDEX_OP_TYPE, { diff --git a/x-pack/plugins/uptime/server/lib/requests/get_ping_histogram.ts b/x-pack/plugins/uptime/server/lib/requests/get_ping_histogram.ts index a74b55c24e227..970d9ad166982 100644 --- a/x-pack/plugins/uptime/server/lib/requests/get_ping_histogram.ts +++ b/x-pack/plugins/uptime/server/lib/requests/get_ping_histogram.ts @@ -8,6 +8,7 @@ import { UMElasticsearchQueryFn } from '../adapters'; import { getFilterClause } from '../helper'; import { HistogramResult, HistogramQueryResult } from '../../../common/runtime_types'; import { QUERY } from '../../../common/constants'; +import { getHistogramInterval } from '../helper/get_histogram_interval'; export interface GetPingHistogramParams { /** @member dateRangeStart timestamp bounds */ @@ -36,22 +37,6 @@ export const getPingHistogram: UMElasticsearchQueryFn< } const filter = getFilterClause(from, to, additionalFilters); - const seriesHistogram: any = {}; - - if (bucketSize) { - seriesHistogram.date_histogram = { - field: '@timestamp', - fixed_interval: bucketSize, - missing: 0, - }; - } else { - seriesHistogram.auto_date_histogram = { - field: '@timestamp', - buckets: QUERY.DEFAULT_BUCKET_COUNT, - missing: 0, - }; - } - const params = { index: dynamicSettings.heartbeatIndices, body: { @@ -63,7 +48,12 @@ export const getPingHistogram: UMElasticsearchQueryFn< size: 0, aggs: { timeseries: { - ...seriesHistogram, + date_histogram: { + field: '@timestamp', + fixed_interval: + bucketSize || getHistogramInterval(from, to, QUERY.DEFAULT_BUCKET_COUNT) + 'ms', + missing: 0, + }, aggs: { down: { filter: { diff --git a/x-pack/test/accessibility/apps/uptime.ts b/x-pack/test/accessibility/apps/uptime.ts index ebd120fa0feea..e6ef1cfe8cfe2 100644 --- a/x-pack/test/accessibility/apps/uptime.ts +++ b/x-pack/test/accessibility/apps/uptime.ts @@ -17,7 +17,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { const esArchiver = 
getService('esArchiver'); const es = getService('es'); - describe('uptime', () => { + // FLAKY: https://github.com/elastic/kibana/issues/72994 + describe.skip('uptime', () => { before(async () => { await esArchiver.load('uptime/blank'); await makeChecks(es, A11Y_TEST_MONITOR_ID, 150, 1, 1000, { diff --git a/x-pack/test/alerting_api_integration/common/fixtures/plugins/alerts/server/alert_types.ts b/x-pack/test/alerting_api_integration/common/fixtures/plugins/alerts/server/alert_types.ts index 26010e5a2c2e8..ebf639067518f 100644 --- a/x-pack/test/alerting_api_integration/common/fixtures/plugins/alerts/server/alert_types.ts +++ b/x-pack/test/alerting_api_integration/common/fixtures/plugins/alerts/server/alert_types.ts @@ -26,6 +26,7 @@ export function defineAlertTypes( defaultActionGroupId: 'default', actionVariables: { state: [{ name: 'instanceStateValue', description: 'the instance state value' }], + params: [{ name: 'instanceParamsValue', description: 'the instance params value' }], context: [{ name: 'instanceContextValue', description: 'the instance context value' }], }, async executor(alertExecutorOptions: AlertExecutorOptions) { diff --git a/x-pack/test/alerting_api_integration/security_and_spaces/tests/alerting/list_alert_types.ts b/x-pack/test/alerting_api_integration/security_and_spaces/tests/alerting/list_alert_types.ts index c3e5af0d1f771..ad60ed6941caf 100644 --- a/x-pack/test/alerting_api_integration/security_and_spaces/tests/alerting/list_alert_types.ts +++ b/x-pack/test/alerting_api_integration/security_and_spaces/tests/alerting/list_alert_types.ts @@ -22,6 +22,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { actionVariables: { state: [], context: [], + params: [], }, producer: 'alertsFixture', }; @@ -34,6 +35,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { actionVariables: { state: [], context: [], + params: [], }, producer: 'alertsRestrictedFixture', }; diff --git a/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/list_alert_types.ts b/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/list_alert_types.ts index dd09a14b4cb81..6fb573c7344b3 100644 --- a/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/list_alert_types.ts +++ b/x-pack/test/alerting_api_integration/spaces_only/tests/alerting/list_alert_types.ts @@ -29,6 +29,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { name: 'Test: Noop', actionVariables: { state: [], + params: [], context: [], }, producer: 'alertsFixture', @@ -48,6 +49,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { expect(fixtureAlertType.actionVariables).to.eql({ state: [{ name: 'instanceStateValue', description: 'the instance state value' }], + params: [{ name: 'instanceParamsValue', description: 'the instance params value' }], context: [{ name: 'instanceContextValue', description: 'the instance context value' }], }); }); @@ -64,6 +66,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { expect(fixtureAlertType.actionVariables).to.eql({ state: [], + params: [], context: [{ name: 'aContextVariable', description: 'this is a context variable' }], }); }); @@ -81,6 +84,7 @@ export default function listAlertTypes({ getService }: FtrProviderContext) { expect(fixtureAlertType.actionVariables).to.eql({ state: [{ name: 'aStateVariable', description: 'this is a state variable' }], context: [], + params: [], }); }); }); diff --git 
a/x-pack/test/api_integration/apis/management/ingest_pipelines/ingest_pipelines.ts b/x-pack/test/api_integration/apis/management/ingest_pipelines/ingest_pipelines.ts index a48460d7a3b23..6a827298521dd 100644 --- a/x-pack/test/api_integration/apis/management/ingest_pipelines/ingest_pipelines.ts +++ b/x-pack/test/api_integration/apis/management/ingest_pipelines/ingest_pipelines.ts @@ -16,7 +16,7 @@ export default function ({ getService }: FtrProviderContext) { const { createPipeline, deletePipeline } = registerEsHelpers(getService); - describe('Pipelines', function () { + describe.skip('Pipelines', function () { describe('Create', () => { const PIPELINE_ID = 'test_create_pipeline'; const REQUIRED_FIELDS_PIPELINE_ID = 'test_create_required_fields_pipeline'; diff --git a/x-pack/test/api_integration/apis/ml/calendars/create_calendars.ts b/x-pack/test/api_integration/apis/ml/calendars/create_calendars.ts new file mode 100644 index 0000000000000..f163df0109ffd --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/create_calendars.ts @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + describe('create_calendars', function () { + const calendarId = `test_create_calendar`; + + const requestBody = { + calendarId, + job_ids: ['test_job_1', 'test_job_2'], + description: 'Test calendar', + events: [ + { description: 'event 1', start_time: 1513641600000, end_time: 1513728000000 }, + { description: 'event 2', start_time: 1513814400000, end_time: 1513900800000 }, + { description: 'event 3', start_time: 1514160000000, end_time: 1514246400000 }, + ], + }; + + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + }); + + afterEach(async () => { + await ml.api.deleteCalendar(calendarId); + }); + + it('should successfully create calendar by id', async () => { + await supertest + .put(`/api/ml/calendars`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .send(requestBody) + .expect(200); + + const results = await ml.api.getCalendar(requestBody.calendarId); + const createdCalendar = results.body.calendars[0]; + + expect(createdCalendar.calendar_id).to.eql(requestBody.calendarId); + expect(createdCalendar.description).to.eql(requestBody.description); + expect(createdCalendar.job_ids).to.eql(requestBody.job_ids); + + await ml.api.waitForEventsToExistInCalendar(calendarId, requestBody.events); + }); + + it('should not create new calendar for user without required permission', async () => { + const { body } = await supertest + .put(`/api/ml/calendars`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .send(requestBody) + .expect(404); + + expect(body.error).to.eql('Not Found'); + expect(body.message).to.eql('Not Found'); + await ml.api.waitForCalendarNotToExist(calendarId); + }); + + 
it('should not create new calendar for unauthorized user', async () => { + const { body } = await supertest + .put(`/api/ml/calendars`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .send(requestBody) + .expect(404); + + expect(body.error).to.eql('Not Found'); + expect(body.message).to.eql('Not Found'); + await ml.api.waitForCalendarNotToExist(calendarId); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/calendars/delete_calendars.ts b/x-pack/test/api_integration/apis/ml/calendars/delete_calendars.ts new file mode 100644 index 0000000000000..5c5d5a3c432fa --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/delete_calendars.ts @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + describe('delete_calendars', function () { + const calendarId = `test_delete_cal`; + const testCalendar = { + calendar_id: calendarId, + job_ids: ['test_job_1', 'test_job_2'], + description: `Test calendar`, + }; + const testEvents = [ + { description: 'event 1', start_time: 1513641600000, end_time: 1513728000000 }, + { description: 'event 2', start_time: 1513814400000, end_time: 1513900800000 }, + { description: 'event 3', start_time: 1514160000000, end_time: 1514246400000 }, + ]; + + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + }); + + beforeEach(async () => { + await ml.api.createCalendar(calendarId, testCalendar); + await ml.api.createCalendarEvents(calendarId, testEvents); + }); + + afterEach(async () => { + await ml.api.deleteCalendar(calendarId); + }); + + it('should delete calendar by id', async () => { + const { body } = await supertest + .delete(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body.acknowledged).to.eql(true); + await ml.api.waitForCalendarNotToExist(calendarId); + }); + + it('should not delete calendar for user without required permission', async () => { + const { body } = await supertest + .delete(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + await ml.api.waitForCalendarToExist(calendarId); + }); + + it('should not delete calendar for unauthorized user', async () => { + const { body } = await supertest + .delete(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + await ml.api.waitForCalendarToExist(calendarId); + }); + + it('should return 404 if invalid calendarId', async () => { + const { body } = await supertest + 
.delete(`/api/ml/calendars/calendar_id_dne`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/calendars/get_calendars.ts b/x-pack/test/api_integration/apis/ml/calendars/get_calendars.ts new file mode 100644 index 0000000000000..e115986b2f092 --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/get_calendars.ts @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + describe('get_calendars', function () { + const testEvents = [ + { description: 'event 1', start_time: 1513641600000, end_time: 1513728000000 }, + { description: 'event 2', start_time: 1513814400000, end_time: 1513900800000 }, + { description: 'event 3', start_time: 1514160000000, end_time: 1514246400000 }, + ]; + + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + }); + + describe('get multiple calendars', function () { + const testCalendars = [1, 2, 3].map((num) => ({ + calendar_id: `test_get_cal_${num}`, + job_ids: ['test_job_1', 'test_job_2'], + description: `Test calendar ${num}`, + })); + + beforeEach(async () => { + for (const testCalendar of testCalendars) { + await ml.api.createCalendar(testCalendar.calendar_id, testCalendar); + await ml.api.createCalendarEvents(testCalendar.calendar_id, testEvents); + } + }); + + afterEach(async () => { + for (const testCalendar of testCalendars) { + await ml.api.deleteCalendar(testCalendar.calendar_id); + } + }); + + it('should fetch all calendars', async () => { + const { body } = await supertest + .get(`/api/ml/calendars`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body).to.have.length(testCalendars.length); + expect(body[0].events).to.have.length(testEvents.length); + ml.api.assertAllEventsExistInCalendar(testEvents, body[0]); + }); + + it('should fetch all calendars for user with view permission', async () => { + const { body } = await supertest + .get(`/api/ml/calendars`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body).to.have.length(testCalendars.length); + expect(body[0].events).to.have.length(testEvents.length); + ml.api.assertAllEventsExistInCalendar(testEvents, body[0]); + }); + + it('should not fetch calendars for unauthorized user', async () => { + const { body } = await supertest + .get(`/api/ml/calendars`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + expect(body.error).to.eql('Not Found'); + }); + }); + + describe('get calendar by id', function () { + const calendarId = `test_get_cal`; + const 
testCalendar = { + calendar_id: calendarId, + job_ids: ['test_job_1', 'test_job_2'], + description: `Test calendar`, + }; + + beforeEach(async () => { + await ml.api.createCalendar(calendarId, testCalendar); + await ml.api.createCalendarEvents(calendarId, testEvents); + }); + + afterEach(async () => { + await ml.api.deleteCalendar(calendarId); + }); + + it('should fetch calendar & associated events by id', async () => { + const { body } = await supertest + .get(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body.job_ids).to.eql(testCalendar.job_ids); + expect(body.description).to.eql(testCalendar.description); + expect(body.events).to.have.length(testEvents.length); + ml.api.assertAllEventsExistInCalendar(testEvents, body); + }); + + it('should fetch calendar & associated events by id for user with view permission', async () => { + const { body } = await supertest + .get(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body.job_ids).to.eql(testCalendar.job_ids); + expect(body.description).to.eql(testCalendar.description); + expect(body.events).to.have.length(testEvents.length); + ml.api.assertAllEventsExistInCalendar(testEvents, body); + }); + + it('should not fetch calendars for unauthorized user', async () => { + const { body } = await supertest + .get(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + }); + }); + + it('should return 404 if invalid calendar id', async () => { + const { body } = await supertest + .get(`/api/ml/calendars/calendar_id_dne`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + expect(body.error).to.eql('Not Found'); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/calendars/helpers.ts b/x-pack/test/api_integration/apis/ml/calendars/helpers.ts new file mode 100644 index 0000000000000..5d143d9b451f2 --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/helpers.ts @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Calendar, CalendarEvent } from '../../../../../plugins/ml/server/models/calendar'; + +export const assertAllEventsExistInCalendar = ( + eventsToCheck: CalendarEvent[], + calendar: Calendar +): boolean => { + const updatedCalendarEvents = calendar.events as CalendarEvent[]; + let allEventsAreUpdated = true; + for (const eventToCheck of eventsToCheck) { + // if at least one of the events that we need to check is not in the updated events + // no need to continue + if ( + updatedCalendarEvents.findIndex( + (updatedEvent) => + updatedEvent.description === eventToCheck.description && + updatedEvent.start_time === eventToCheck.start_time && + updatedEvent.end_time === eventToCheck.end_time + ) < 0 + ) { + allEventsAreUpdated = false; + break; + } + } + return allEventsAreUpdated; +}; diff --git a/x-pack/test/api_integration/apis/ml/calendars/index.ts b/x-pack/test/api_integration/apis/ml/calendars/index.ts new file mode 100644 index 0000000000000..e7d824205e6cc --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/index.ts @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { FtrProviderContext } from '../../../ftr_provider_context'; + +export default function ({ loadTestFile }: FtrProviderContext) { + describe('calendars', function () { + loadTestFile(require.resolve('./create_calendars')); + loadTestFile(require.resolve('./get_calendars')); + loadTestFile(require.resolve('./delete_calendars')); + loadTestFile(require.resolve('./update_calendars')); + }); +} diff --git a/x-pack/test/api_integration/apis/ml/calendars/update_calendars.ts b/x-pack/test/api_integration/apis/ml/calendars/update_calendars.ts new file mode 100644 index 0000000000000..5194370b19e66 --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/calendars/update_calendars.ts @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + describe('update_calendars', function () { + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + }); + + const calendarId = `test_update_cal`; + const originalCalendar = { + calendar_id: calendarId, + job_ids: ['test_job_1'], + description: `Test calendar`, + }; + const originalEvents = [ + { description: 'event 1', start_time: 1513641600000, end_time: 1513728000000 }, + ]; + + const updateCalendarRequestBody = { + calendarId, + job_ids: ['test_updated_job_1', 'test_updated_job_2'], + description: 'Updated calendar #1', + events: [ + { description: 'updated event 2', start_time: 1513814400000, end_time: 1513900800000 }, + { description: 'updated event 3', start_time: 1514160000000, end_time: 1514246400000 }, + ], + }; + + beforeEach(async () => { + await ml.api.createCalendar(calendarId, originalCalendar); + await ml.api.createCalendarEvents(calendarId, originalEvents); + }); + + afterEach(async () => { + await ml.api.deleteCalendar(calendarId); + }); + + it('should update calendar by id with new settings', async () => { + await supertest + .put(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateCalendarRequestBody) + .expect(200); + + await ml.api.waitForCalendarToExist(calendarId); + + const getCalendarResult = await ml.api.getCalendar(calendarId); + const getEventsResult = await ml.api.getCalendarEvents(calendarId); + + const updatedCalendar = getCalendarResult.body.calendars[0]; + const updatedEvents = getEventsResult.body.events; + + expect(updatedCalendar.calendar_id).to.eql(updateCalendarRequestBody.calendarId); + expect(updatedCalendar.job_ids).to.have.length(updateCalendarRequestBody.job_ids.length); + expect(updatedEvents).to.have.length(updateCalendarRequestBody.events.length); + await ml.api.waitForEventsToExistInCalendar( + updatedCalendar.calendar_id, + updateCalendarRequestBody.events + ); + }); + + it('should not allow to update calendar for user without required permission ', async () => { + await supertest + .put(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateCalendarRequestBody) + .expect(404); + }); + + it('should not allow to update calendar for unauthorized user', async () => { + await supertest + .put(`/api/ml/calendars/${calendarId}`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .send(updateCalendarRequestBody) + .expect(404); + }); + + it('should return error if invalid calendarId ', async () => { + await supertest + .put(`/api/ml/calendars/calendar_id_dne`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateCalendarRequestBody) + .expect(404); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/filters/create_filters.ts b/x-pack/test/api_integration/apis/ml/filters/create_filters.ts new file 
mode 100644 index 0000000000000..c175d3a9a3d9c --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/filters/create_filters.ts @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + const testDataList = [ + { + testTitle: 'should successfully create new filter', + user: USER.ML_POWERUSER, + requestBody: { filterId: 'safe_ip_addresses', description: '', items: ['104.236.210.185'] }, + expected: { + responseCode: 200, + responseBody: { + filter_id: 'safe_ip_addresses', + description: '', + items: ['104.236.210.185'], + }, + }, + }, + { + testTitle: 'should not create new filter for user without required permission', + user: USER.ML_VIEWER, + requestBody: { + filterId: 'safe_ip_addresses_view_only', + + description: '', + items: ['104.236.210.185'], + }, + expected: { + responseCode: 404, + responseBody: { + error: 'Not Found', + message: 'Not Found', + }, + }, + }, + { + testTitle: 'should not create new filter for unauthorized user', + user: USER.ML_UNAUTHORIZED, + requestBody: { + filterId: 'safe_ip_addresses_unauthorized', + description: '', + items: ['104.236.210.185'], + }, + expected: { + responseCode: 404, + responseBody: { + error: 'Not Found', + message: 'Not Found', + }, + }, + }, + { + testTitle: 'should return 400 bad request if invalid filterId', + user: USER.ML_POWERUSER, + requestBody: { + filterId: '@invalid_filter_id', + description: '', + items: ['104.236.210.185'], + }, + expected: { + responseCode: 400, + responseBody: { + error: 'Bad Request', + message: 'Invalid filter_id', + }, + }, + }, + { + testTitle: 'should return 400 bad request if invalid items', + user: USER.ML_POWERUSER, + requestBody: { filterId: 'valid_filter', description: '' }, + expected: { + responseCode: 400, + responseBody: { + error: 'Bad Request', + message: 'expected value of type [array] but got [undefined]', + }, + }, + }, + ]; + + describe('create_filters', function () { + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + }); + + after(async () => { + for (const testData of testDataList) { + const { filterId } = testData.requestBody; + await ml.api.deleteFilter(filterId); + } + }); + + for (const testData of testDataList) { + const { testTitle, user, requestBody, expected } = testData; + it(`${testTitle}`, async () => { + const { body } = await supertest + .put(`/api/ml/filters`) + .auth(user, ml.securityCommon.getPasswordForUser(user)) + .set(COMMON_REQUEST_HEADERS) + .send(requestBody) + .expect(expected.responseCode); + if (body.error === undefined) { + // Validate the important parts of the response. 
+ const expectedResponse = testData.expected.responseBody; + expect(body).to.eql(expectedResponse); + } else { + expect(body.error).to.contain(expected.responseBody.error); + expect(body.message).to.contain(expected.responseBody.message); + } + }); + } + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/filters/delete_filters.ts b/x-pack/test/api_integration/apis/ml/filters/delete_filters.ts new file mode 100644 index 0000000000000..bb83a7f720692 --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/filters/delete_filters.ts @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + const items = ['104.236.210.185']; + const validFilters = [ + { + filterId: 'filter_power', + requestBody: { description: 'Test delete filter #1', items }, + }, + { + filterId: 'filter_viewer', + requestBody: { description: 'Test delete filter (viewer)', items }, + }, + { + filterId: 'filter_unauthorized', + requestBody: { description: 'Test delete filter (unauthorized)', items }, + }, + ]; + + describe('delete_filters', function () { + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + for (const filter of validFilters) { + const { filterId, requestBody } = filter; + await ml.api.createFilter(filterId, requestBody); + } + }); + + after(async () => { + for (const filter of validFilters) { + const { filterId } = filter; + await ml.api.deleteFilter(filterId); + } + }); + + it(`should delete filter by id`, async () => { + const { filterId } = validFilters[0]; + const { body } = await supertest + .delete(`/api/ml/filters/${filterId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body.acknowledged).to.eql(true); + await ml.api.waitForFilterToNotExist(filterId); + }); + + it(`should not delete filter for user without required permission`, async () => { + const { filterId } = validFilters[1]; + const { body } = await supertest + .delete(`/api/ml/filters/${filterId}`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + await ml.api.waitForFilterToExist(filterId); + }); + + it(`should not delete filter for unauthorized user`, async () => { + const { filterId } = validFilters[2]; + const { body } = await supertest + .delete(`/api/ml/filters/${filterId}`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + await ml.api.waitForFilterToExist(filterId); + }); + + it(`should not allow user to delete filter if invalid filterId`, async () => { + const { body } = await supertest + .delete(`/api/ml/filters/filter_id_dne`) + .auth(USER.ML_POWERUSER, 
ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + expect(body.error).to.eql('Not Found'); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/filters/get_filters.ts b/x-pack/test/api_integration/apis/ml/filters/get_filters.ts new file mode 100644 index 0000000000000..3dd6093a9917f --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/filters/get_filters.ts @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + const validFilters = [ + { + filterId: 'filter_1', + requestBody: { description: 'Valid filter #1', items: ['104.236.210.185'] }, + }, + { + filterId: 'filter_2', + requestBody: { description: 'Valid filter #2', items: ['104.236.210.185'] }, + }, + ]; + + describe('get_filters', function () { + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + for (const filter of validFilters) { + const { filterId, requestBody } = filter; + await ml.api.createFilter(filterId, requestBody); + } + }); + + after(async () => { + for (const filter of validFilters) { + const { filterId } = filter; + await ml.api.deleteFilter(filterId); + } + }); + it(`should fetch all filters`, async () => { + const { body } = await supertest + .get(`/api/ml/filters`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body).to.have.length(validFilters.length); + }); + + it(`should not allow to retrieve filters for user without required permission`, async () => { + const { body } = await supertest + .get(`/api/ml/filters`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + expect(body.error).to.eql('Not Found'); + expect(body.message).to.eql('Not Found'); + }); + + it(`should not allow to retrieve filters for unauthorized user`, async () => { + const { body } = await supertest + .get(`/api/ml/filters`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .expect(404); + + expect(body.error).to.eql('Not Found'); + expect(body.message).to.eql('Not Found'); + }); + + it(`should fetch single filter by id`, async () => { + const { filterId, requestBody } = validFilters[0]; + const { body } = await supertest + .get(`/api/ml/filters/${filterId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(200); + + expect(body.filter_id).to.eql(filterId); + expect(body.description).to.eql(requestBody.description); + expect(body.items).to.eql(requestBody.items); + }); + + it(`should return 400 if filterId does not exist`, async () => { + const { body } = await supertest + .get(`/api/ml/filters/filter_id_dne`) + .auth(USER.ML_POWERUSER, 
ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .expect(400); + expect(body.error).to.eql('Bad Request'); + expect(body.message).to.contain('Unable to find filter'); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/filters/index.ts b/x-pack/test/api_integration/apis/ml/filters/index.ts new file mode 100644 index 0000000000000..0c0bc4eab29ec --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/filters/index.ts @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { FtrProviderContext } from '../../../ftr_provider_context'; + +export default function ({ loadTestFile }: FtrProviderContext) { + describe('filters', function () { + loadTestFile(require.resolve('./create_filters')); + loadTestFile(require.resolve('./get_filters')); + loadTestFile(require.resolve('./delete_filters')); + loadTestFile(require.resolve('./update_filters')); + }); +} diff --git a/x-pack/test/api_integration/apis/ml/filters/update_filters.ts b/x-pack/test/api_integration/apis/ml/filters/update_filters.ts new file mode 100644 index 0000000000000..eb58d545093c4 --- /dev/null +++ b/x-pack/test/api_integration/apis/ml/filters/update_filters.ts @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import expect from '@kbn/expect'; + +import { FtrProviderContext } from '../../../ftr_provider_context'; +import { USER } from '../../../../functional/services/ml/security_common'; +import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common'; + +// eslint-disable-next-line import/no-default-export +export default ({ getService }: FtrProviderContext) => { + const supertest = getService('supertestWithoutAuth'); + const ml = getService('ml'); + + const items = ['104.236.210.185']; + const validFilters = [ + { + filterId: 'filter_power', + requestBody: { description: 'Test update filter #1', items }, + }, + { + filterId: 'filter_viewer', + requestBody: { description: 'Test update filter (viewer)', items }, + }, + { + filterId: 'filter_unauthorized', + requestBody: { description: 'Test update filter (unauthorized)', items }, + }, + ]; + + describe('update_filters', function () { + const updateFilterRequestBody = { + description: 'Updated filter #1', + removeItems: items, + addItems: ['my_new_items_1', 'my_new_items_2'], + }; + before(async () => { + await ml.testResources.setKibanaTimeZoneToUTC(); + for (const filter of validFilters) { + const { filterId, requestBody } = filter; + await ml.api.createFilter(filterId, requestBody); + } + }); + + after(async () => { + for (const filter of validFilters) { + const { filterId } = filter; + await ml.api.deleteFilter(filterId); + } + }); + + it(`should update filter by id`, async () => { + const { filterId } = validFilters[0]; + const { body } = await supertest + .put(`/api/ml/filters/${filterId}`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateFilterRequestBody) + .expect(200); + + expect(body.filter_id).to.eql(filterId); + expect(body.description).to.eql(updateFilterRequestBody.description); + 
expect(body.items).to.eql(updateFilterRequestBody.addItems); + }); + + it(`should not allow to update filter for user without required permission`, async () => { + const { filterId, requestBody: oldFilterRequest } = validFilters[1]; + const { body } = await supertest + .put(`/api/ml/filters/${filterId}`) + .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateFilterRequestBody) + .expect(404); + + // response should return not found + expect(body.error).to.eql('Not Found'); + + // and the filter should not be updated + const response = await ml.api.getFilter(filterId); + const updatedFilter = response.body.filters[0]; + expect(updatedFilter.filter_id).to.eql(filterId); + expect(updatedFilter.description).to.eql(oldFilterRequest.description); + expect(updatedFilter.items).to.eql(oldFilterRequest.items); + }); + + it(`should not allow to update filter for unauthorized user`, async () => { + const { filterId, requestBody: oldFilterRequest } = validFilters[2]; + const { body } = await supertest + .put(`/api/ml/filters/${filterId}`) + .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED)) + .set(COMMON_REQUEST_HEADERS) + .send(updateFilterRequestBody) + .expect(404); + + expect(body.error).to.eql('Not Found'); + + const response = await ml.api.getFilter(filterId); + const updatedFilter = response.body.filters[0]; + expect(updatedFilter.filter_id).to.eql(filterId); + expect(updatedFilter.description).to.eql(oldFilterRequest.description); + expect(updatedFilter.items).to.eql(oldFilterRequest.items); + }); + + it(`should return appropriate error if invalid filterId`, async () => { + const { body } = await supertest + .put(`/api/ml/filters/filter_id_dne`) + .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER)) + .set(COMMON_REQUEST_HEADERS) + .send(updateFilterRequestBody) + .expect(400); + + expect(body.message).to.contain('No filter with id'); + }); + }); +}; diff --git a/x-pack/test/api_integration/apis/ml/index.ts b/x-pack/test/api_integration/apis/ml/index.ts index 5c2e7a6c4b2f7..b29bc47b50394 100644 --- a/x-pack/test/api_integration/apis/ml/index.ts +++ b/x-pack/test/api_integration/apis/ml/index.ts @@ -58,5 +58,7 @@ export default function ({ getService, loadTestFile }: FtrProviderContext) { loadTestFile(require.resolve('./jobs')); loadTestFile(require.resolve('./results')); loadTestFile(require.resolve('./data_frame_analytics')); + loadTestFile(require.resolve('./filters')); + loadTestFile(require.resolve('./calendars')); }); } diff --git a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram.json b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram.json index 562ba64c24b0b..85ce545ed92b0 100644 --- a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram.json +++ b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram.json @@ -1,121 +1,157 @@ { "histogram": [ { - "x": 1568172664000, + "x": 1568172657286, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172694000, + "x": 1568172680087, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172724000, + "x": 1568172702888, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172754000, + "x": 1568172725689, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172748490, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172784000, + "x": 1568172771291, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172814000, + "x": 
1568172794092, "downCount": 8, "upCount": 92, "y": 1 }, { - "x": 1568172844000, + "x": 1568172816893, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172839694, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172874000, + "x": 1568172862495, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172904000, + "x": 1568172885296, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172934000, + "x": 1568172908097, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172930898, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172964000, + "x": 1568172953699, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568172994000, + "x": 1568172976500, "downCount": 8, "upCount": 92, "y": 1 }, { - "x": 1568173024000, + "x": 1568172999301, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173022102, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173054000, + "x": 1568173044903, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173084000, + "x": 1568173067704, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173114000, + "x": 1568173090505, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173113306, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173144000, + "x": 1568173136107, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173174000, + "x": 1568173158908, "downCount": 8, "upCount": 92, "y": 1 }, { - "x": 1568173204000, + "x": 1568173181709, "downCount": 7, "upCount": 93, "y": 1 }, { - "x": 1568173234000, + "x": 1568173204510, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173227311, "downCount": 7, "upCount": 93, "y": 1 diff --git a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_filter.json b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_filter.json index 42be715c4acd4..fe5dc9dd3da3f 100644 --- a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_filter.json +++ b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_filter.json @@ -1,121 +1,157 @@ { "histogram": [ { - "x": 1568172664000, + "x": 1568172657286, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172694000, + "x": 1568172680087, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172724000, + "x": 1568172702888, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172754000, + "x": 1568172725689, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172748490, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172784000, + "x": 1568172771291, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172814000, + "x": 1568172794092, "downCount": 0, "upCount": 92, "y": 1 }, { - "x": 1568172844000, + "x": 1568172816893, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172839694, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172874000, + "x": 1568172862495, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172904000, + "x": 1568172885296, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172934000, + "x": 1568172908097, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172930898, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172964000, + "x": 1568172953699, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568172994000, + "x": 1568172976500, "downCount": 0, "upCount": 92, "y": 1 }, { - "x": 1568173024000, + "x": 1568172999301, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173022102, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173054000, + "x": 
1568173044903, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173084000, + "x": 1568173067704, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173114000, + "x": 1568173090505, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173113306, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173144000, + "x": 1568173136107, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173174000, + "x": 1568173158908, "downCount": 0, "upCount": 92, "y": 1 }, { - "x": 1568173204000, + "x": 1568173181709, "downCount": 0, "upCount": 93, "y": 1 }, { - "x": 1568173234000, + "x": 1568173204510, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173227311, "downCount": 0, "upCount": 93, "y": 1 diff --git a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_id.json b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_id.json index 9a726db616325..e54738cf5dbd7 100644 --- a/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_id.json +++ b/x-pack/test/api_integration/apis/uptime/rest/fixtures/ping_histogram_by_id.json @@ -1,121 +1,157 @@ { "histogram": [ { - "x": 1568172664000, + "x": 1568172657286, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172694000, + "x": 1568172680087, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172724000, + "x": 1568172702888, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172754000, + "x": 1568172725689, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172748490, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172784000, + "x": 1568172771291, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172814000, + "x": 1568172794092, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172844000, + "x": 1568172816893, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172839694, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172874000, + "x": 1568172862495, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172904000, + "x": 1568172885296, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172934000, + "x": 1568172908097, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568172930898, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172964000, + "x": 1568172953699, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568172994000, + "x": 1568172976500, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173024000, + "x": 1568172999301, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173022102, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173054000, + "x": 1568173044903, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173084000, + "x": 1568173067704, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173114000, + "x": 1568173090505, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173113306, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173144000, + "x": 1568173136107, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173174000, + "x": 1568173158908, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173204000, + "x": 1568173181709, "downCount": 0, "upCount": 1, "y": 1 }, { - "x": 1568173234000, + "x": 1568173204510, + "downCount": 0, + "upCount": 0, + "y": 1 + }, + { + "x": 1568173227311, "downCount": 0, "upCount": 1, "y": 1 diff --git a/x-pack/test/api_integration/apis/uptime/rest/ping_histogram.ts b/x-pack/test/api_integration/apis/uptime/rest/ping_histogram.ts index ffcb1a829f0f8..b2504e3b921f7 100644 --- 
a/x-pack/test/api_integration/apis/uptime/rest/ping_histogram.ts +++ b/x-pack/test/api_integration/apis/uptime/rest/ping_histogram.ts @@ -6,7 +6,6 @@ import { expectFixtureEql } from './helper/expect_fixture_eql'; import { FtrProviderContext } from '../../../ftr_provider_context'; -import { assertCloseTo } from '../../../../../plugins/uptime/server/lib/helper'; export default function ({ getService }: FtrProviderContext) { describe('pingHistogram', () => { @@ -21,10 +20,6 @@ export default function ({ getService }: FtrProviderContext) { ); const data = apiResponse.body; - // manually testing this value and then removing it to avoid flakiness - const { interval } = data; - assertCloseTo(interval, 22801, 100); - delete data.interval; expectFixtureEql(data, 'ping_histogram'); }); @@ -38,9 +33,6 @@ export default function ({ getService }: FtrProviderContext) { ); const data = apiResponse.body; - const { interval } = data; - assertCloseTo(interval, 22801, 100); - delete data.interval; expectFixtureEql(data, 'ping_histogram_by_id'); }); @@ -55,9 +47,6 @@ export default function ({ getService }: FtrProviderContext) { ); const data = apiResponse.body; - const { interval } = data; - assertCloseTo(interval, 22801, 100); - delete data.interval; expectFixtureEql(data, 'ping_histogram_by_filter'); }); }); diff --git a/x-pack/test/functional/apps/dashboard_mode/dashboard_empty_screen.js b/x-pack/test/functional/apps/dashboard_mode/dashboard_empty_screen.js index c8a8f9653c11b..62e07a08d1762 100644 --- a/x-pack/test/functional/apps/dashboard_mode/dashboard_empty_screen.js +++ b/x-pack/test/functional/apps/dashboard_mode/dashboard_empty_screen.js @@ -98,5 +98,15 @@ export default function ({ getPageObjects, getService }) { const titles = await PageObjects.dashboard.getPanelTitles(); expect(titles.indexOf(newTitle)).to.not.be(-1); }); + + it('loses originatingApp connection after save as when redirectToOrigin is false', async () => { + const newTitle = 'wowee, my title just got cooler again'; + await PageObjects.dashboard.waitForRenderComplete(); + await dashboardPanelActions.openContextMenu(); + await dashboardPanelActions.clickEdit(); + await PageObjects.lens.save(newTitle, true, false); + await PageObjects.lens.notLinkedToOriginatingApp(); + await PageObjects.common.navigateToApp('dashboard'); + }); }); } diff --git a/x-pack/test/functional/apps/graph/graph.ts b/x-pack/test/functional/apps/graph/graph.ts index 803e5e8f80d70..c2500dca78444 100644 --- a/x-pack/test/functional/apps/graph/graph.ts +++ b/x-pack/test/functional/apps/graph/graph.ts @@ -13,8 +13,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { const esArchiver = getService('esArchiver'); const browser = getService('browser'); - // FLAKY: https://github.com/elastic/kibana/issues/53749 - describe.skip('graph', function () { + describe('graph', function () { before(async () => { await browser.setWindowSize(1600, 1000); log.debug('load graph/secrepo data'); @@ -132,14 +131,17 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { await buildGraph(); const { edges } = await PageObjects.graph.getGraphObjects(); - const blogAdminBlogEdge = edges.find( + await PageObjects.graph.isolateEdge('test', '/test/wp-admin/'); + + await PageObjects.graph.stopLayout(); + await PageObjects.common.sleep(1000); + const testTestWpAdminBlogEdge = edges.find( ({ sourceNode, targetNode }) => - sourceNode.label === '/blog/wp-admin/' && targetNode.label === 'blog' + targetNode.label === '/test/wp-admin/' && 
sourceNode.label === 'test' )!; - - await PageObjects.graph.isolateEdge(blogAdminBlogEdge); - - await PageObjects.graph.clickEdge(blogAdminBlogEdge); + await testTestWpAdminBlogEdge.element.click(); + await PageObjects.common.sleep(1000); + await PageObjects.graph.startLayout(); const vennTerm1 = await PageObjects.graph.getVennTerm1(); log.debug('vennTerm1 = ' + vennTerm1); @@ -156,11 +158,11 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) { const smallVennTerm2 = await PageObjects.graph.getSmallVennTerm2(); log.debug('smallVennTerm2 = ' + smallVennTerm2); - expect(vennTerm1).to.be('/blog/wp-admin/'); - expect(vennTerm2).to.be('blog'); - expect(smallVennTerm1).to.be('5'); - expect(smallVennTerm12).to.be(' (5) '); - expect(smallVennTerm2).to.be('8'); + expect(vennTerm1).to.be('/test/wp-admin/'); + expect(vennTerm2).to.be('test'); + expect(smallVennTerm1).to.be('4'); + expect(smallVennTerm12).to.be(' (4) '); + expect(smallVennTerm2).to.be('4'); }); it('should delete graph', async function () { diff --git a/x-pack/test/functional/apps/maps/index.js b/x-pack/test/functional/apps/maps/index.js index d0735aecda78b..4bbe38367d0a2 100644 --- a/x-pack/test/functional/apps/maps/index.js +++ b/x-pack/test/functional/apps/maps/index.js @@ -35,6 +35,7 @@ export default function ({ loadTestFile, getService }) { loadTestFile(require.resolve('./saved_object_management')); loadTestFile(require.resolve('./sample_data')); loadTestFile(require.resolve('./auto_fit_to_bounds')); + loadTestFile(require.resolve('./layer_visibility')); loadTestFile(require.resolve('./feature_controls/maps_security')); loadTestFile(require.resolve('./feature_controls/maps_spaces')); loadTestFile(require.resolve('./full_screen_mode')); diff --git a/x-pack/test/functional/apps/maps/layer_visibility.js b/x-pack/test/functional/apps/maps/layer_visibility.js new file mode 100644 index 0000000000000..22cff6de416c1 --- /dev/null +++ b/x-pack/test/functional/apps/maps/layer_visibility.js @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import expect from '@kbn/expect'; + +export default function ({ getPageObjects, getService }) { + const PageObjects = getPageObjects(['maps']); + const inspector = getService('inspector'); + + describe('layer visibility', () => { + before(async () => { + await PageObjects.maps.loadSavedMap('document example hidden'); + }); + + afterEach(async () => { + await inspector.close(); + }); + + it('should not make any requests when layer is hidden', async () => { + const noRequests = await PageObjects.maps.doesInspectorHaveRequests(); + expect(noRequests).to.equal(true); + }); + + it('should fetch layer data when layer is made visible', async () => { + await PageObjects.maps.toggleLayerVisibility('logstash'); + const hits = await PageObjects.maps.getHits(); + expect(hits).to.equal('6'); + }); + }); +} diff --git a/x-pack/test/functional/apps/upgrade_assistant/upgrade_assistant.ts b/x-pack/test/functional/apps/upgrade_assistant/upgrade_assistant.ts index 85ad98727cea5..57b8fb23613be 100644 --- a/x-pack/test/functional/apps/upgrade_assistant/upgrade_assistant.ts +++ b/x-pack/test/functional/apps/upgrade_assistant/upgrade_assistant.ts @@ -11,14 +11,22 @@ export default function upgradeAssistantFunctionalTests({ getPageObjects, }: FtrProviderContext) { const esArchiver = getService('esArchiver'); - const PageObjects = getPageObjects(['upgradeAssistant']); + const PageObjects = getPageObjects(['upgradeAssistant', 'common']); + const security = getService('security'); + const log = getService('log'); describe('Upgrade Checkup', function () { this.tags('includeFirefox'); - before(async () => await esArchiver.load('empty_kibana')); + + before(async () => { + await esArchiver.load('empty_kibana'); + await security.testUser.setRoles(['global_upgrade_assistant_role']); + }); + after(async () => { await PageObjects.upgradeAssistant.expectTelemetryHasFinish(); await esArchiver.unload('empty_kibana'); + await security.testUser.restoreDefaults(); }); it('allows user to navigate to upgrade checkup', async () => { @@ -28,9 +36,17 @@ export default function upgradeAssistantFunctionalTests({ it('allows user to toggle deprecation logging', async () => { await PageObjects.upgradeAssistant.navigateToPage(); + log.debug('expect initial state to be ON'); await PageObjects.upgradeAssistant.expectDeprecationLoggingLabel('On'); + log.debug('Now toggle to off'); await PageObjects.upgradeAssistant.toggleDeprecationLogging(); + await PageObjects.common.sleep(2000); + log.debug('expect state to be OFF after toggle'); await PageObjects.upgradeAssistant.expectDeprecationLoggingLabel('Off'); + await PageObjects.upgradeAssistant.toggleDeprecationLogging(); + await PageObjects.common.sleep(2000); + log.debug('expect state to be ON after toggle'); + await PageObjects.upgradeAssistant.expectDeprecationLoggingLabel('On'); }); it('allows user to open cluster tab', async () => { diff --git a/x-pack/test/functional/apps/uptime/settings.ts b/x-pack/test/functional/apps/uptime/settings.ts index 1286a9940c02c..744b9120028d7 100644 --- a/x-pack/test/functional/apps/uptime/settings.ts +++ b/x-pack/test/functional/apps/uptime/settings.ts @@ -16,7 +16,6 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { const es = getService('es'); - // Flaky https://github.com/elastic/kibana/issues/60866 describe('uptime settings page', () => { beforeEach('navigate to clean app root', async () => { // make 10 checks diff --git a/x-pack/test/functional/config.js b/x-pack/test/functional/config.js index 
5c13e430ae2ca..fdd694e73394e 100644 --- a/x-pack/test/functional/config.js +++ b/x-pack/test/functional/config.js @@ -284,6 +284,20 @@ export default async function ({ readConfigFile }) { ], }, + global_upgrade_assistant_role: { + elasticsearch: { + cluster: ['manage'], + }, + kibana: [ + { + feature: { + discover: ['read'], + }, + spaces: ['*'], + }, + ], + }, + global_ccr_role: { elasticsearch: { cluster: ['manage', 'manage_ccr'], diff --git a/x-pack/test/functional/es_archives/maps/kibana/data.json b/x-pack/test/functional/es_archives/maps/kibana/data.json index d2206009d9e65..7690c92589312 100644 --- a/x-pack/test/functional/es_archives/maps/kibana/data.json +++ b/x-pack/test/functional/es_archives/maps/kibana/data.json @@ -446,6 +446,37 @@ } } +{ + "type": "doc", + "value": { + "id": "map:2de4de10-cc82-11ea-9b0a-eb2886fc84af", + "index": ".kibana", + "source": { + "map": { + "title" : "document example hidden", + "description" : "", + "mapStateJSON" : "{\"zoom\":4.1,\"center\":{\"lon\":-100.61091,\"lat\":33.23887},\"timeFilters\":{\"from\":\"2015-09-20T00:00:00.000Z\",\"to\":\"2015-09-20T01:00:00.000Z\"},\"refreshConfig\":{\"isPaused\":true,\"interval\":1000},\"query\":{\"query\":\"\",\"language\":\"kuery\"},\"filters\":[],\"settings\":{\"autoFitToDataBounds\":false,\"initialLocation\":\"LAST_SAVED_LOCATION\",\"fixedLocation\":{\"lat\":0,\"lon\":0,\"zoom\":2},\"browserLocation\":{\"zoom\":2},\"maxZoom\":24,\"minZoom\":0,\"showSpatialFilters\":true,\"spatialFiltersAlpa\":0.3,\"spatialFiltersFillColor\":\"#DA8B45\",\"spatialFiltersLineColor\":\"#DA8B45\"}}", + "layerListJSON" : "[{\"id\":\"0hmz5\",\"sourceDescriptor\":{\"type\":\"EMS_TMS\",\"id\":\"road_map\"},\"visible\":true,\"temporary\":false,\"style\":{\"type\":\"TILE\",\"properties\":{}},\"type\":\"VECTOR_TILE\",\"minZoom\":0,\"maxZoom\":24},{\"id\":\"z52lq\",\"label\":\"logstash\",\"minZoom\":0,\"maxZoom\":24,\"sourceDescriptor\":{\"id\":\"e1a5e1a6-676c-4a89-8ea9-0d91d64b73c6\",\"type\":\"ES_SEARCH\",\"geoField\":\"geo.coordinates\",\"limit\":2048,\"filterByMapBounds\":true,\"showTooltip\":true,\"tooltipProperties\":[],\"applyGlobalQuery\":true,\"scalingType\":\"LIMIT\",\"indexPatternRefName\":\"layer_1_source_index_pattern\"},\"visible\":false,\"temporary\":false,\"style\":{\"type\":\"VECTOR\",\"properties\":{\"fillColor\":{\"type\":\"STATIC\",\"options\":{\"color\":\"#e6194b\"}},\"lineColor\":{\"type\":\"STATIC\",\"options\":{\"color\":\"#FFFFFF\"}},\"lineWidth\":{\"type\":\"STATIC\",\"options\":{\"size\":1}},\"iconSize\":{\"type\":\"STATIC\",\"options\":{\"size\":10}},\"symbolizeAs\":{\"options\":{\"value\":\"circle\"}},\"icon\":{\"type\":\"STATIC\",\"options\":{\"value\":\"marker\"}}},\"previousStyle\":null},\"type\":\"VECTOR\"}]", + "uiStateJSON" : "{\"isLayerTOCOpen\":true,\"openTOCDetails\":[]}" + }, + "type" : "map", + "references" : [ + { + "name" : "layer_1_source_index_pattern", + "type" : "index-pattern", + "id" : "c698b940-e149-11e8-a35a-370a8516603a" + } + ], + "migrationVersion" : { + "map" : "7.9.0" + }, + "updated_at" : "2020-07-23T01:16:47.600Z" + } + } +} + + + { "type": "doc", "value": { diff --git a/x-pack/test/functional/page_objects/graph_page.ts b/x-pack/test/functional/page_objects/graph_page.ts index 0d3e2c10579f5..fe049327fe38b 100644 --- a/x-pack/test/functional/page_objects/graph_page.ts +++ b/x-pack/test/functional/page_objects/graph_page.ts @@ -83,10 +83,7 @@ export function GraphPageProvider({ getService, getPageObjects }: FtrProviderCon return [this.getPositionAsString(x1, y1), 
this.getPositionAsString(x2, y2)]; } - async isolateEdge(edge: Edge) { - const from = edge.sourceNode.label; - const to = edge.targetNode.label; - + async isolateEdge(from: string, to: string) { // select all nodes await testSubjects.click('graphSelectAll'); @@ -109,13 +106,6 @@ export function GraphPageProvider({ getService, getPageObjects }: FtrProviderCon await testSubjects.click('graphRemoveSelection'); } - async clickEdge(edge: Edge) { - await this.stopLayout(); - await PageObjects.common.sleep(1000); - await edge.element.click(); - await this.startLayout(); - } - async stopLayout() { if (await testSubjects.exists('graphPauseLayout')) { await testSubjects.click('graphPauseLayout'); diff --git a/x-pack/test/functional/page_objects/lens_page.ts b/x-pack/test/functional/page_objects/lens_page.ts index d101c9754d562..79548db0e2630 100644 --- a/x-pack/test/functional/page_objects/lens_page.ts +++ b/x-pack/test/functional/page_objects/lens_page.ts @@ -195,5 +195,15 @@ export function LensPageProvider({ getService, getPageObjects }: FtrProviderCont async createLayer() { await testSubjects.click('lnsLayerAddButton'); }, + + async linkedToOriginatingApp() { + await PageObjects.header.waitUntilLoadingHasFinished(); + await testSubjects.existOrFail('lnsApp_saveAndReturnButton'); + }, + + async notLinkedToOriginatingApp() { + await PageObjects.header.waitUntilLoadingHasFinished(); + await testSubjects.missingOrFail('lnsApp_saveAndReturnButton'); + }, }); } diff --git a/x-pack/test/functional/services/ml/api.ts b/x-pack/test/functional/services/ml/api.ts index a48159cd7515f..9dfec3a17dec0 100644 --- a/x-pack/test/functional/services/ml/api.ts +++ b/x-pack/test/functional/services/ml/api.ts @@ -5,14 +5,12 @@ */ import expect from '@kbn/expect'; import { ProvidedType } from '@kbn/test/types/ftr'; +import { Calendar, CalendarEvent } from '../../../../plugins/ml/server/models/calendar/index'; import { DataFrameAnalyticsConfig } from '../../../../plugins/ml/public/application/data_frame_analytics/common'; - import { FtrProviderContext } from '../../ftr_provider_context'; - import { DATAFEED_STATE, JOB_STATE } from '../../../../plugins/ml/common/constants/states'; import { DATA_FRAME_TASK_STATE } from '../../../../plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/common'; import { Datafeed, Job } from '../../../../plugins/ml/common/types/anomaly_detection_jobs'; - export type MlApi = ProvidedType<typeof MachineLearningAPIProvider>; export function MachineLearningAPIProvider({ getService }: FtrProviderContext) { @@ -325,19 +323,102 @@ export function MachineLearningAPIProvider({ getService }: FtrProviderContext) { }); }, - async getCalendar(calendarId: string) { - return await esSupertest.get(`/_ml/calendars/${calendarId}`).expect(200); + async getCalendar(calendarId: string, expectedCode = 200) { + return await esSupertest.get(`/_ml/calendars/${calendarId}`).expect(expectedCode); }, - async createCalendar(calendarId: string, body = { description: '', job_ids: [] }) { + async createCalendar( + calendarId: string, + requestBody: Partial<Calendar> = { description: '', job_ids: [] } + ) { log.debug(`Creating calendar with id '${calendarId}'...`); - await esSupertest.put(`/_ml/calendars/${calendarId}`).send(body).expect(200); + await esSupertest.put(`/_ml/calendars/${calendarId}`).send(requestBody).expect(200); + await this.waitForCalendarToExist(calendarId); + }, + + async deleteCalendar(calendarId: string) { + log.debug(`Deleting calendar with id '${calendarId}'...`); + await
esSupertest.delete(`/_ml/calendars/${calendarId}`); + + await this.waitForCalendarNotToExist(calendarId); + }, + + async waitForCalendarToExist(calendarId: string, errorMsg?: string) { + await retry.waitForWithTimeout(`'${calendarId}' to exist`, 5 * 1000, async () => { + if (await this.getCalendar(calendarId, 200)) { + return true; + } else { + throw new Error(errorMsg || `expected calendar '${calendarId}' to exist`); + } + }); + }, - await retry.waitForWithTimeout(`'${calendarId}' to be created`, 30 * 1000, async () => { - if (await this.getCalendar(calendarId)) { + async waitForCalendarNotToExist(calendarId: string, errorMsg?: string) { + await retry.waitForWithTimeout(`'${calendarId}' to not exist`, 5 * 1000, async () => { + if (await this.getCalendar(calendarId, 404)) { return true; } else { - throw new Error(`expected calendar '${calendarId}' to be created`); + throw new Error(errorMsg || `expected calendar '${calendarId}' to not exist`); + } + }); + }, + + async createCalendarEvents(calendarId: string, events: CalendarEvent[]) { + log.debug(`Creating events for calendar with id '${calendarId}'...`); + await esSupertest.post(`/_ml/calendars/${calendarId}/events`).send({ events }).expect(200); + await this.waitForEventsToExistInCalendar(calendarId, events); + }, + + async getCalendarEvents(calendarId: string, expectedCode = 200) { + return await esSupertest.get(`/_ml/calendars/${calendarId}/events`).expect(expectedCode); + }, + + assertAllEventsExistInCalendar: ( + eventsToCheck: CalendarEvent[], + calendar: Calendar + ): boolean => { + const updatedCalendarEvents = calendar.events as CalendarEvent[]; + let allEventsAreUpdated = true; + for (const eventToCheck of eventsToCheck) { + // if at least one of the events that we need to check is not in the updated events + // no need to continue + if ( + updatedCalendarEvents.findIndex( + (updatedEvent) => + updatedEvent.description === eventToCheck.description && + updatedEvent.start_time === eventToCheck.start_time && + updatedEvent.end_time === eventToCheck.end_time + ) < 0 + ) { + allEventsAreUpdated = false; + break; + } + } + expect(allEventsAreUpdated).to.eql( + true, + `Expected calendar ${calendar.calendar_id} to contain events ${JSON.stringify( + eventsToCheck + )}` + ); + return true; + }, + + async waitForEventsToExistInCalendar( + calendarId: string, + eventsToCheck: CalendarEvent[], + errorMsg?: string + ) { + await retry.waitForWithTimeout(`'${calendarId}' events to exist`, 5 * 1000, async () => { + // validate if calendar events have been updated with the requested events + const { body } = await this.getCalendarEvents(calendarId, 200); + + if (this.assertAllEventsExistInCalendar(eventsToCheck, body)) { + return true; + } else { + throw new Error( + errorMsg || + `expected events for calendar '${calendarId}' to have been updated correctly` + ); } }); }, @@ -515,5 +596,43 @@ export function MachineLearningAPIProvider({ getService }: FtrProviderContext) { } ); }, + + async getFilter(filterId: string, expectedCode = 200) { + return await esSupertest.get(`/_ml/filters/${filterId}`).expect(expectedCode); + }, + + async createFilter(filterId: string, requestBody: object) { + log.debug(`Creating filter with id '${filterId}'...`); + await esSupertest.put(`/_ml/filters/${filterId}`).send(requestBody).expect(200); + + await this.waitForFilterToExist(filterId, `expected filter '${filterId}' to be created`); + }, + + async deleteFilter(filterId: string) { + log.debug(`Deleting filter with id '${filterId}'...`); + await 
esSupertest.delete(`/_ml/filters/${filterId}`); + + await this.waitForFilterToNotExist(filterId, `expected filter '${filterId}' to be deleted`); + }, + + async waitForFilterToExist(filterId: string, errorMsg?: string) { + await retry.waitForWithTimeout(`'${filterId}' to exist`, 5 * 1000, async () => { + if (await this.getFilter(filterId, 200)) { + return true; + } else { + throw new Error(errorMsg || `expected filter '${filterId}' to exist`); + } + }); + }, + + async waitForFilterToNotExist(filterId: string, errorMsg?: string) { + await retry.waitForWithTimeout(`'${filterId}' to not exist`, 5 * 1000, async () => { + if (await this.getFilter(filterId, 404)) { + return true; + } else { + throw new Error(errorMsg || `expected filter '${filterId}' to not exist`); + } + }); + }, }; } diff --git a/x-pack/test/functional/services/ml/data_visualizer_file_based.ts b/x-pack/test/functional/services/ml/data_visualizer_file_based.ts index eea0a83879ea7..8c5e40dd5dbdd 100644 --- a/x-pack/test/functional/services/ml/data_visualizer_file_based.ts +++ b/x-pack/test/functional/services/ml/data_visualizer_file_based.ts @@ -101,7 +101,7 @@ export function MachineLearningDataVisualizerFileBasedProvider( }, async startImportAndWaitForProcessing() { - await testSubjects.click('mlFileDataVisImportButton'); + await testSubjects.clickWhenNotDisabled('mlFileDataVisImportButton'); await retry.tryForTime(60 * 1000, async () => { await testSubjects.existOrFail('mlFileImportSuccessCallout'); }); diff --git a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts index 09c4156854506..fa714e8374ec7 100644 --- a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts +++ b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts @@ -86,7 +86,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { await testSubjects.click('variableMenuButton-1'); expect(await messageTextArea.getAttribute('value')).to.eql( - 'test message {{alertId}} some additional text {{alertName}}' + 'test message {{alertId}} some additional text {{alertInstanceId}}' ); await testSubjects.click('saveAlertButton'); diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/install.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/install_overrides.ts similarity index 100% rename from x-pack/test/ingest_manager_api_integration/apis/epm/install.ts rename to x-pack/test/ingest_manager_api_integration/apis/epm/install_overrides.ts diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts new file mode 100644 index 0000000000000..9ca8ebf136078 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import expect from '@kbn/expect'; +import { FtrProviderContext } from '../../../api_integration/ftr_provider_context'; +import { skipIfNoDockerRegistry } from '../../helpers'; + +export default function (providerContext: FtrProviderContext) { + const { getService } = providerContext; + const kibanaServer = getService('kibanaServer'); + const supertest = getService('supertest'); + const es = getService('es'); + const pkgName = 'all_assets'; + const pkgVersion = '0.1.0'; + const pkgKey = `${pkgName}-${pkgVersion}`; + const logsTemplateName = `logs-${pkgName}.test_logs`; + const metricsTemplateName = `metrics-${pkgName}.test_metrics`; + + const uninstallPackage = async (pkg: string) => { + await supertest.delete(`/api/ingest_manager/epm/packages/${pkg}`).set('kbn-xsrf', 'xxxx'); + }; + const installPackage = async (pkg: string) => { + await supertest.post(`/api/ingest_manager/epm/packages/${pkg}`).set('kbn-xsrf', 'xxxx'); + }; + + describe('installs and uninstalls all assets', async () => { + describe('installs all assets when installing a package for the first time', async () => { + skipIfNoDockerRegistry(providerContext); + before(async () => { + await installPackage(pkgKey); + }); + it('should have installed the ILM policy', async function () { + const resPolicy = await es.transport.request({ + method: 'GET', + path: `/_ilm/policy/all_assets`, + }); + expect(resPolicy.statusCode).equal(200); + }); + it('should have installed the index templates', async function () { + const resLogsTemplate = await es.transport.request({ + method: 'GET', + path: `/_index_template/${logsTemplateName}`, + }); + expect(resLogsTemplate.statusCode).equal(200); + + const resMetricsTemplate = await es.transport.request({ + method: 'GET', + path: `/_index_template/${metricsTemplateName}`, + }); + expect(resMetricsTemplate.statusCode).equal(200); + }); + it('should have installed the pipelines', async function () { + const res = await es.transport.request({ + method: 'GET', + path: `/_ingest/pipeline/${logsTemplateName}-${pkgVersion}`, + }); + expect(res.statusCode).equal(200); + }); + it('should have installed the template components', async function () { + const res = await es.transport.request({ + method: 'GET', + path: `/_component_template/${logsTemplateName}-mappings`, + }); + expect(res.statusCode).equal(200); + const resSettings = await es.transport.request({ + method: 'GET', + path: `/_component_template/${logsTemplateName}-settings`, + }); + expect(resSettings.statusCode).equal(200); + }); + it('should have installed the kibana assets', async function () { + const resIndexPatternLogs = await kibanaServer.savedObjects.get({ + type: 'index-pattern', + id: 'logs-*', + }); + expect(resIndexPatternLogs.id).equal('logs-*'); + const resIndexPatternMetrics = await kibanaServer.savedObjects.get({ + type: 'index-pattern', + id: 'metrics-*', + }); + expect(resIndexPatternMetrics.id).equal('metrics-*'); + const resIndexPatternEvents = await kibanaServer.savedObjects.get({ + type: 'index-pattern', + id: 'events-*', + }); + expect(resIndexPatternEvents.id).equal('events-*'); + const resDashboard = await kibanaServer.savedObjects.get({ + type: 'dashboard', + id: 'sample_dashboard', + }); + expect(resDashboard.id).equal('sample_dashboard'); + const resDashboard2 = await kibanaServer.savedObjects.get({ + type: 'dashboard', + id: 'sample_dashboard2', + }); + expect(resDashboard2.id).equal('sample_dashboard2'); + const resVis = await kibanaServer.savedObjects.get({ + type: 'visualization', + id: 
'sample_visualization', + }); + expect(resVis.id).equal('sample_visualization'); + const resSearch = await kibanaServer.savedObjects.get({ + type: 'search', + id: 'sample_search', + }); + expect(resSearch.id).equal('sample_search'); + }); + }); + + describe('uninstalls all assets when uninstalling a package', async () => { + skipIfNoDockerRegistry(providerContext); + before(async () => { + await uninstallPackage(pkgKey); + }); + it('should have uninstalled the index templates', async function () { + const resLogsTemplate = await es.transport.request( + { + method: 'GET', + path: `/_index_template/${logsTemplateName}`, + }, + { + ignore: [404], + } + ); + expect(resLogsTemplate.statusCode).equal(404); + + const resMetricsTemplate = await es.transport.request( + { + method: 'GET', + path: `/_index_template/${metricsTemplateName}`, + }, + { + ignore: [404], + } + ); + expect(resMetricsTemplate.statusCode).equal(404); + }); + it('should have uninstalled the pipelines', async function () { + const res = await es.transport.request( + { + method: 'GET', + path: `/_ingest/pipeline/${logsTemplateName}-${pkgVersion}`, + }, + { + ignore: [404], + } + ); + expect(res.statusCode).equal(404); + }); + it('should have uninstalled the kibana assets', async function () { + let resDashboard; + try { + resDashboard = await kibanaServer.savedObjects.get({ + type: 'dashboard', + id: 'sample_dashboard', + }); + } catch (err) { + resDashboard = err; + } + expect(resDashboard.response.data.statusCode).equal(404); + let resDashboard2; + try { + resDashboard2 = await kibanaServer.savedObjects.get({ + type: 'dashboard', + id: 'sample_dashboard2', + }); + } catch (err) { + resDashboard2 = err; + } + expect(resDashboard2.response.data.statusCode).equal(404); + let resVis; + try { + resVis = await kibanaServer.savedObjects.get({ + type: 'visualization', + id: 'sample_visualization', + }); + } catch (err) { + resVis = err; + } + expect(resVis.response.data.statusCode).equal(404); + let resSearch; + try { + resSearch = await kibanaServer.savedObjects.get({ + type: 'search', + id: 'sample_search', + }); + } catch (err) { + resSearch = err; + } + expect(resSearch.response.data.statusCode).equal(404); + }); + }); + }); +} diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/list.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/list.ts index 74aaf48d15674..2fbda8f2d3c81 100644 --- a/x-pack/test/ingest_manager_api_integration/apis/epm/list.ts +++ b/x-pack/test/ingest_manager_api_integration/apis/epm/list.ts @@ -29,7 +29,7 @@ export default function ({ getService }: FtrProviderContext) { return response.body; }; const listResponse = await fetchPackageList(); - expect(listResponse.response.length).to.be(5); + expect(listResponse.response.length).to.be(6); } else { warnAndSkipTest(this, log); } diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ilm_policy/all_assets.json b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ilm_policy/all_assets.json new file mode 100644 index 0000000000000..7cf62e890f865 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ilm_policy/all_assets.json @@ -0,0 +1,15 @@ +{ + "policy": { + "phases": { + "hot": { + "min_age": "0ms", + "actions": { + "rollover": { + "max_size": "50gb", + "max_age": "30d" + } + } + } + } + } +} \ No newline at end of 
file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ingest_pipeline/default.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ingest_pipeline/default.yml new file mode 100644 index 0000000000000..580db049d0d5d --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/elasticsearch/ingest_pipeline/default.yml @@ -0,0 +1,7 @@ +--- +description: Pipeline for parsing test logs + plugins. +processors: +- set: + field: error.message + value: '{{ _ingest.on_failure_message }}' \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/fields/fields.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/fields/fields.yml new file mode 100644 index 0000000000000..12a9a03c1337b --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/fields/fields.yml @@ -0,0 +1,16 @@ +- name: dataset.type + type: constant_keyword + description: > + Dataset type. +- name: dataset.name + type: constant_keyword + description: > + Dataset name. +- name: dataset.namespace + type: constant_keyword + description: > + Dataset namespace. +- name: '@timestamp' + type: date + description: > + Event timestamp. diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/manifest.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/manifest.yml new file mode 100644 index 0000000000000..8cd522e2845bb --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_logs/manifest.yml @@ -0,0 +1,9 @@ +title: Test Dataset + +type: logs + +elasticsearch: + index_template.mappings: + dynamic: false + index_template.settings: + index.lifecycle.name: reference \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/fields/fields.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/fields/fields.yml new file mode 100644 index 0000000000000..12a9a03c1337b --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/fields/fields.yml @@ -0,0 +1,16 @@ +- name: dataset.type + type: constant_keyword + description: > + Dataset type. +- name: dataset.name + type: constant_keyword + description: > + Dataset name. +- name: dataset.namespace + type: constant_keyword + description: > + Dataset namespace. +- name: '@timestamp' + type: date + description: > + Event timestamp. 
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/manifest.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/manifest.yml new file mode 100644 index 0000000000000..6bc20442bd432 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/dataset/test_metrics/manifest.yml @@ -0,0 +1,3 @@ +title: Test Dataset + +type: metrics \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/docs/README.md b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/docs/README.md new file mode 100644 index 0000000000000..2617f1fcabe11 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/docs/README.md @@ -0,0 +1,3 @@ +# Test package + +For testing that a package installs its elasticsearch assets when installed for the first time (not updating) and removing the package diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/img/logo_overrides_64_color.svg b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/img/logo_overrides_64_color.svg new file mode 100644 index 0000000000000..b03007a76ffcc --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/img/logo_overrides_64_color.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard.json b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard.json new file mode 100644 index 0000000000000..ef08d69324210 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard.json @@ -0,0 +1,16 @@ +{ + "attributes": { + "description": "Sample dashboard", + "hits": 0, + "kibanaSavedObjectMeta": { + "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}" + }, + "optionsJSON": "{\"darkTheme\":false}", + "panelsJSON": "[{\"embeddableConfig\":{},\"gridData\":{\"h\":12,\"i\":\"1\",\"w\":24,\"x\":0,\"y\":0},\"panelIndex\":\"1\",\"panelRefName\":\"panel_0\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{\"columns\":[\"kafka.log.class\",\"kafka.log.trace.class\",\"kafka.log.trace.full\"],\"sort\":[\"@timestamp\",\"desc\"]},\"gridData\":{\"h\":12,\"i\":\"2\",\"w\":24,\"x\":24,\"y\":0},\"panelIndex\":\"2\",\"panelRefName\":\"panel_1\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{\"columns\":[\"log.level\",\"kafka.log.component\",\"message\"],\"sort\":[\"@timestamp\",\"desc\"]},\"gridData\":{\"h\":20,\"i\":\"3\",\"w\":48,\"x\":0,\"y\":20},\"panelIndex\":\"3\",\"panelRefName\":\"panel_2\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{},\"gridData\":{\"h\":8,\"i\":\"4\",\"w\":48,\"x\":0,\"y\":12},\"panelIndex\":\"4\",\"panelRefName\":\"panel_3\",\"version\":\"7.3.0\"}]", + "timeRestore": false, + "title": "[Logs Sample] Overview ECS", + "version": 1 + }, + "id": "sample_dashboard", + "type": "dashboard" +} \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard2.json 
b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard2.json new file mode 100644 index 0000000000000..7ea63c5d444ba --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/dashboard/sample_dashboard2.json @@ -0,0 +1,16 @@ +{ + "attributes": { + "description": "Sample dashboard 2", + "hits": 0, + "kibanaSavedObjectMeta": { + "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}" + }, + "optionsJSON": "{\"darkTheme\":false}", + "panelsJSON": "[{\"embeddableConfig\":{},\"gridData\":{\"h\":12,\"i\":\"1\",\"w\":24,\"x\":0,\"y\":0},\"panelIndex\":\"1\",\"panelRefName\":\"panel_0\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{\"columns\":[\"kafka.log.class\",\"kafka.log.trace.class\",\"kafka.log.trace.full\"],\"sort\":[\"@timestamp\",\"desc\"]},\"gridData\":{\"h\":12,\"i\":\"2\",\"w\":24,\"x\":24,\"y\":0},\"panelIndex\":\"2\",\"panelRefName\":\"panel_1\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{\"columns\":[\"log.level\",\"kafka.log.component\",\"message\"],\"sort\":[\"@timestamp\",\"desc\"]},\"gridData\":{\"h\":20,\"i\":\"3\",\"w\":48,\"x\":0,\"y\":20},\"panelIndex\":\"3\",\"panelRefName\":\"panel_2\",\"version\":\"7.3.0\"},{\"embeddableConfig\":{},\"gridData\":{\"h\":8,\"i\":\"4\",\"w\":48,\"x\":0,\"y\":12},\"panelIndex\":\"4\",\"panelRefName\":\"panel_3\",\"version\":\"7.3.0\"}]", + "timeRestore": false, + "title": "[Logs Sample2] Overview ECS", + "version": 1 + }, + "id": "sample_dashboard2", + "type": "dashboard" +} \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/search/sample_search.json b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/search/sample_search.json new file mode 100644 index 0000000000000..28185affabef8 --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/search/sample_search.json @@ -0,0 +1,24 @@ +{ + "attributes": { + "columns": [ + "log.level", + "kafka.log.component", + "message" + ], + "description": "", + "hits": 0, + "kibanaSavedObjectMeta": { + "searchSourceJSON": "{\"filter\":[{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.filter[0].meta.index\",\"key\":\"dataset.name\",\"negate\":false,\"params\":{\"query\":\"kafka.log\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"log\"},\"query\":{\"match\":{\"dataset.name\":{\"query\":\"kafka.log\",\"type\":\"phrase\"}}}}],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}" + }, + "sort": [ + [ + "@timestamp", + "desc" + ] + ], + "title": "All logs [Logs Kafka] ECS", + "version": 1 + }, + "id": "sample_search", + "type": "search" +} \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/visualization/sample_visualization.json b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/visualization/sample_visualization.json new file mode 100644 index 0000000000000..e814b83bbf324 --- /dev/null +++ 
b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/kibana/visualization/sample_visualization.json @@ -0,0 +1,11 @@ +{ + "attributes": { + "description": "sample visualization", + "title": "sample vis title", + "uiStateJSON": "{}", + "version": 1, + "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"extended_bounds\":{},\"field\":\"@timestamp\",\"interval\":\"auto\",\"min_doc_count\":1},\"schema\":\"segment\",\"type\":\"date_histogram\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"Log Level\",\"field\":\"log.level\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":5},\"schema\":\"group\",\"type\":\"terms\"}],\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"@timestamp per day\"},\"type\":\"category\"}],\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"legendPosition\":\"right\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"histogram\",\"valueAxis\":\"ValueAxis-1\"}],\"times\":[],\"type\":\"histogram\",\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}]},\"title\":\"Log levels over time [Logs Kafka] ECS\",\"type\":\"histogram\"}" + }, + "id": "sample_visualization", + "type": "visualization" +} \ No newline at end of file diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/manifest.yml b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/manifest.yml new file mode 100644 index 0000000000000..3c11b5103fbeb --- /dev/null +++ b/x-pack/test/ingest_manager_api_integration/apis/fixtures/test_packages/all_assets/0.1.0/manifest.yml @@ -0,0 +1,20 @@ +format_version: 1.0.0 +name: all_assets +title: All Assets Installed/Uninstalled Test +description: This is a test package for testing that all assets were installed when installing a package for the first time and removing the assets during package uninstall +version: 0.1.0 +categories: [] +release: beta +type: integration +license: basic + +requirement: + elasticsearch: + versions: '>7.7.0' + kibana: + versions: '>7.7.0' + +icons: + - src: '/img/logo_overrides_64_color.svg' + size: '16x16' + type: 'image/svg+xml' diff --git a/x-pack/test/ingest_manager_api_integration/apis/index.js b/x-pack/test/ingest_manager_api_integration/apis/index.js index c0c8ce3ff082c..1045ff5d82d12 100644 --- a/x-pack/test/ingest_manager_api_integration/apis/index.js +++ b/x-pack/test/ingest_manager_api_integration/apis/index.js @@ -16,7 +16,8 @@ export default function ({ loadTestFile }) { loadTestFile(require.resolve('./epm/file')); //loadTestFile(require.resolve('./epm/template')); loadTestFile(require.resolve('./epm/ilm')); - loadTestFile(require.resolve('./epm/install')); + loadTestFile(require.resolve('./epm/install_overrides')); + loadTestFile(require.resolve('./epm/install_remove_assets')); // Package configs 
loadTestFile(require.resolve('./package_config/create')); diff --git a/x-pack/test/ingest_manager_api_integration/config.ts b/x-pack/test/ingest_manager_api_integration/config.ts index 6f5d8eed43519..2aa2e62a4b9e1 100644 --- a/x-pack/test/ingest_manager_api_integration/config.ts +++ b/x-pack/test/ingest_manager_api_integration/config.ts @@ -8,7 +8,6 @@ import path from 'path'; import { FtrConfigProviderContext } from '@kbn/test/types/ftr'; import { defineDockerServersConfig } from '@kbn/test'; -import { services } from '../api_integration/services'; export default async function ({ readConfigFile }: FtrConfigProviderContext) { const xPackAPITestsConfig = await readConfigFile(require.resolve('../api_integration/config.ts')); @@ -49,9 +48,7 @@ export default async function ({ readConfigFile }: FtrConfigProviderContext) { }), esArchiver: xPackAPITestsConfig.get('esArchiver'), services: { - ...services, - supertest: xPackAPITestsConfig.get('services.supertest'), - es: xPackAPITestsConfig.get('services.es'), + ...xPackAPITestsConfig.get('services'), }, junit: { reportName: 'X-Pack EPM API Integration Tests', diff --git a/x-pack/test/ingest_manager_api_integration/helpers.ts b/x-pack/test/ingest_manager_api_integration/helpers.ts index b1755e30f61f5..a5ffc4e7adc24 100644 --- a/x-pack/test/ingest_manager_api_integration/helpers.ts +++ b/x-pack/test/ingest_manager_api_integration/helpers.ts @@ -22,7 +22,7 @@ export function skipIfNoDockerRegistry(providerContext: FtrProviderContext) { const server = dockerServers.get('registry'); const log = getService('log'); - beforeEach(function beforeSetupWithDockerRegistyry() { + beforeEach(function beforeSetupWithDockerRegistry() { if (!server.enabled) { warnAndSkipTest(this, log); } diff --git a/x-pack/test/security_solution_endpoint/apps/endpoint/policy_list.ts b/x-pack/test/security_solution_endpoint/apps/endpoint/policy_list.ts index 57321ab4cd911..0c5e15ed4104c 100644 --- a/x-pack/test/security_solution_endpoint/apps/endpoint/policy_list.ts +++ b/x-pack/test/security_solution_endpoint/apps/endpoint/policy_list.ts @@ -78,7 +78,7 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) { 'Protect East Coastrev. 
1', 'elastic', 'elastic', - `${policyInfo.packageConfig.package?.title} v${policyInfo.packageConfig.package?.version}`, + `v${policyInfo.packageConfig.package?.version}`, '', ]); [policyRow[2], policyRow[4]].forEach((relativeDate) => { @@ -131,6 +131,11 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) { expect(endpointConfig).not.to.be(undefined); }); + it('should have empty value for package configuration name', async () => { + await pageObjects.ingestManagerCreatePackageConfig.selectAgentConfig(); + expect(await pageObjects.ingestManagerCreatePackageConfig.getPackageConfigName()).to.be(''); + }); + it('should redirect user back to Policy List after a successful save', async () => { const newPolicyName = `endpoint policy ${Date.now()}`; await pageObjects.ingestManagerCreatePackageConfig.selectAgentConfig(); diff --git a/x-pack/test/security_solution_endpoint/page_objects/ingest_manager_create_package_config_page.ts b/x-pack/test/security_solution_endpoint/page_objects/ingest_manager_create_package_config_page.ts index dd3fc637a3d6c..dfdb528b7362c 100644 --- a/x-pack/test/security_solution_endpoint/page_objects/ingest_manager_create_package_config_page.ts +++ b/x-pack/test/security_solution_endpoint/page_objects/ingest_manager_create_package_config_page.ts @@ -62,6 +62,13 @@ export function IngestManagerCreatePackageConfig({ } }, + /** + * Returns the package config name currently populated on the input field + */ + async getPackageConfigName() { + return testSubjects.getAttribute('packageConfigNameInput', 'value'); + }, + /** * Set the name of the package config on the input field * @param name diff --git a/yarn.lock b/yarn.lock index 4cc802e328ab8..1bb8fab0372ae 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4625,10 +4625,10 @@ resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a" integrity sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA== -"@types/archiver@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-3.0.0.tgz#c0a53e0ed3b7aef626ce683d081d7821d8c638b4" - integrity sha512-orghAMOF+//wSg4ru2znk6jt0eIPvKTtMVLH7XcYcjbcRyAXRClDlh27QVdqnAvVM37yu9xDP6Nh7egRhNr8tQ== +"@types/archiver@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-3.1.0.tgz#0d5bd922ba5cf06e137cd6793db7942439b1805e" + integrity sha512-nTvHwgWONL+iXG+9CX+gnQ/tTOV+qucAjwpXqeUn4OCRMxP42T29FFP/7XaOo0EqqO3TlENhObeZEe7RUJAriw== dependencies: "@types/glob" "*"