diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 012e49690fd15..5680bd8cbf822 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -81,6 +81,9 @@ /x-pack/legacy/plugins/rollup/ @elastic/es-ui /x-pack/legacy/plugins/searchprofiler/ @elastic/es-ui /x-pack/legacy/plugins/snapshot_restore/ @elastic/es-ui +# ML team owns the transform plugin, ES team added here for visibility +# because the plugin lives in Kibana's Elasticsearch management section. +/x-pack/legacy/plugins/transform/ @elastic/es-ui /x-pack/legacy/plugins/watcher/ @elastic/es-ui # Kibana TSVB external contractors diff --git a/Jenkinsfile b/Jenkinsfile index 2cd9c372dee9c..fca814b265295 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -17,8 +17,6 @@ stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a 'oss-ciGroup4': getOssCiGroupWorker(4), 'oss-ciGroup5': getOssCiGroupWorker(5), 'oss-ciGroup6': getOssCiGroupWorker(6), - ]), - 'kibana-oss-agent2': withWorkers('kibana-oss-tests2', { buildOss() }, [ 'oss-ciGroup7': getOssCiGroupWorker(7), 'oss-ciGroup8': getOssCiGroupWorker(8), 'oss-ciGroup9': getOssCiGroupWorker(9), @@ -34,8 +32,6 @@ stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a 'xpack-ciGroup3': getXpackCiGroupWorker(3), 'xpack-ciGroup4': getXpackCiGroupWorker(4), 'xpack-ciGroup5': getXpackCiGroupWorker(5), - ]), - 'kibana-xpack-agent2': withWorkers('kibana-xpack-tests2', { buildXpack() }, [ 'xpack-ciGroup6': getXpackCiGroupWorker(6), 'xpack-ciGroup7': getXpackCiGroupWorker(7), 'xpack-ciGroup8': getXpackCiGroupWorker(8), @@ -60,7 +56,7 @@ stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) { return { - jobRunner('tests-xl') { + jobRunner('tests-xl', true) { try { doSetup() preWorkerClosure() @@ -151,7 +147,7 @@ def legacyJobRunner(name) { withEnv([ "JOB=${name}", ]) { - jobRunner('linux && immutable') { + jobRunner('linux && immutable', false) { try { runbld('.ci/run.sh', true) } finally { @@ -172,8 +168,21 @@ def legacyJobRunner(name) { } } -def jobRunner(label, closure) { +def jobRunner(label, useRamDisk, closure) { node(label) { + if (useRamDisk) { + // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm + def originalWorkspace = env.WORKSPACE + ws('/tmp/workspace') { + sh """ + mkdir -p /dev/shm/workspace + mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist + rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it + ln -s /dev/shm/workspace '${originalWorkspace}' + """ + } + } + def scmVars = checkout scm withEnv([ diff --git a/docs/api/saved-objects/bulk_create.asciidoc b/docs/api/saved-objects/bulk_create.asciidoc index ca8cc0f287015..d649684bc30f2 100644 --- a/docs/api/saved-objects/bulk_create.asciidoc +++ b/docs/api/saved-objects/bulk_create.asciidoc @@ -11,6 +11,15 @@ experimental[] Create multiple {kib} saved objects. `POST /api/saved_objects/_bulk_create` +`POST /s//api/saved_objects/_bulk_create` + + +[[saved-objects-api-bulk-create-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL the default space is used. + [[saved-objects-api-bulk-create-query-params]] ==== Query parameters @@ -38,7 +47,7 @@ experimental[] Create multiple {kib} saved objects. 
[[saved-objects-api-bulk-create-response-body]] ==== Response body -`saved_objects`:: +`saved_objects`:: (array) Top-level property the contains objects that represent the response for each of the requested objects. The order of the objects in the response is identical to the order of the objects in the request. Saved objects that are unable to persist are replaced with an error object. @@ -46,13 +55,13 @@ Saved objects that are unable to persist are replaced with an error object. [[saved-objects-api-bulk-create-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. [[saved-objects-api-bulk-create-example]] ==== Example -Create an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID: +Create an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID: [source,js] -------------------------------------------------- @@ -67,7 +76,7 @@ POST api/saved_objects/_bulk_create }, { "type": "dashboard", - "id": "my-dashboard", + "id": "be3733a0-9efe-11e7-acb3-3dab96693fab", "attributes": { "title": "Look at my dashboard" } @@ -91,7 +100,7 @@ The API returns the following: } }, { - "id": "my-dashboard", + "id": "be3733a0-9efe-11e7-acb3-3dab96693fab", "type": "dashboard", "error": { "statusCode": 409, diff --git a/docs/api/saved-objects/bulk_get.asciidoc b/docs/api/saved-objects/bulk_get.asciidoc index 4f2cbcb980f82..3ef5823716d79 100644 --- a/docs/api/saved-objects/bulk_get.asciidoc +++ b/docs/api/saved-objects/bulk_get.asciidoc @@ -11,6 +11,14 @@ experimental[] Retrieve multiple {kib} saved objects by ID. `POST /api/saved_objects/_bulk_get` +`POST /s//api/saved_objects/_bulk_get` + +[[saved-objects-api-bulk-get-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + [[saved-objects-api-bulk-get-request-body]] ==== Request Body @@ -18,15 +26,15 @@ experimental[] Retrieve multiple {kib} saved objects by ID. (Required, string) Valid options include `visualization`, `dashboard`, `search`, `index-pattern`, `config`, and `timelion-sheet`. `id`:: - (Required, string) ID of the retrieved object. + (Required, string) ID of the retrieved object. The ID includes the {kib} unique identifier or a custom identifier. `fields`:: (Optional, array) The fields returned in the object response. - + [[saved-objects-api-bulk-get-response-body]] ==== Response body -`saved_objects`:: +`saved_objects`:: (array) Top-level property the contains objects that represent the response for each of the requested objects. The order of the objects in the response is identical to the order of the objects in the request. Saved objects that are unable to persist are replaced with an error object. @@ -34,13 +42,13 @@ Saved objects that are unable to persist are replaced with an error object. [[saved-objects-api-bulk-get-body-codes]] ==== Response code -`200`:: - Indicates a successfully call. +`200`:: + Indicates a successful call. 
[[saved-objects-api-bulk-get-body-example]] ==== Example -Retrieve an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID: +Retrieve an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID: [source,js] -------------------------------------------------- @@ -52,7 +60,7 @@ POST api/saved_objects/_bulk_get }, { "type": "dashboard", - "id": "my-dashboard" + "id": "be3733a0-9efe-11e7-acb3-3dab96693fab" } ] -------------------------------------------------- diff --git a/docs/api/saved-objects/create.asciidoc b/docs/api/saved-objects/create.asciidoc index fecc3f3732f2a..634c71bb4eefe 100644 --- a/docs/api/saved-objects/create.asciidoc +++ b/docs/api/saved-objects/create.asciidoc @@ -13,9 +13,14 @@ experimental[] Create {kib} saved objects. `POST /api/saved_objects//` +`POST /s//saved_objects/` + [[saved-objects-api-create-path-params]] ==== Path parameters +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + ``:: (Required, string) Valid options include `visualization`, `dashboard`, `search`, `index-pattern`, `config`, and `timelion-sheet`. @@ -44,9 +49,9 @@ any data that you send to the API is properly formed. [[saved-objects-api-create-request-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. - + [[saved-objects-api-create-example]] ==== Example diff --git a/docs/api/saved-objects/delete.asciidoc b/docs/api/saved-objects/delete.asciidoc index 4a96cf554f784..c34f9b67dfd22 100644 --- a/docs/api/saved-objects/delete.asciidoc +++ b/docs/api/saved-objects/delete.asciidoc @@ -13,9 +13,14 @@ WARNING: Once you delete a saved object, _it cannot be recovered_. `DELETE /api/saved_objects//` +`DELETE /s//api/saved_objects//` + [[saved-objects-api-delete-path-params]] ==== Path parameters +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + `type`:: (Required, string) Valid options include `visualization`, `dashboard`, `search`, `index-pattern`, `config`, and `timelion-sheet`. @@ -25,7 +30,7 @@ WARNING: Once you delete a saved object, _it cannot be recovered_. [[saved-objects-api-delete-response-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. ==== Examples diff --git a/docs/api/saved-objects/export.asciidoc b/docs/api/saved-objects/export.asciidoc index ee56e6bad75c8..1b4f50dda2ddb 100644 --- a/docs/api/saved-objects/export.asciidoc +++ b/docs/api/saved-objects/export.asciidoc @@ -11,15 +11,23 @@ experimental[] Retrieve sets of saved objects that you want to import into {kib} `POST /api/saved_objects/_export` +`POST /s//api/saved_objects/_export` + +[[saved-objects-api-export-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + [[saved-objects-api-export-request-request-body]] ==== Request body `type`:: (Optional, array|string) The saved object types to include in the export. - + `objects`:: (Optional, array) A list of objects to export. - + `includeReferencesDeep`:: (Optional, boolean) Includes all of the referenced objects in the exported objects. @@ -50,7 +58,7 @@ When `excludeExportDetails=false` (the default) we append an export result detai [[export-objects-api-create-request-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. 
[[ssaved-objects-api-create-example]] diff --git a/docs/api/saved-objects/find.asciidoc b/docs/api/saved-objects/find.asciidoc index f20ded78e0743..955c50922fde7 100644 --- a/docs/api/saved-objects/find.asciidoc +++ b/docs/api/saved-objects/find.asciidoc @@ -11,33 +11,41 @@ experimental[] Retrieve a paginated set of {kib} saved objects by various condit `GET /api/saved_objects/_find` +`GET /s//api/saved_objects/_find` + +[[saved-objects-api-find-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + [[saved-objects-api-find-query-params]] ==== Query Parameters `type`:: (Required, array|string) The saved object types to include in the export. - + `per_page`:: (Optional, number) The number of objects to return per page. - + `page`:: (Optional, number) The page of objects to return. - + `search`:: (Optional, string) An Elasticsearch {ref}/query-dsl-simple-query-string-query.html[simple_query_string] query that filters the objects in the response. - + `default_search_operator`:: (Optional, string) The default operator to use for the `simple_query_string`. - + `search_fields`:: (Optional, array|string) The fields to perform the `simple_query_string` parsed query against. - + `fields`:: (Optional, array|string) The fields to return in the response. - + `sort_field`:: (Optional, string) The field that sorts the response. - + `has_reference`:: (Optional, object) Filters to objects that have a relationship with the type and ID combination. @@ -52,7 +60,7 @@ change. Use the find API for traditional paginated results, but avoid using it t [[saved-objects-api-find-request-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. ==== Examples @@ -92,4 +100,3 @@ query parameter for each value: GET api/saved_objects/_find?fields=id&fields=title -------------------------------------------------- // KIBANA - diff --git a/docs/api/saved-objects/get.asciidoc b/docs/api/saved-objects/get.asciidoc index f0c22ced4b121..29f8ef67e0a83 100644 --- a/docs/api/saved-objects/get.asciidoc +++ b/docs/api/saved-objects/get.asciidoc @@ -11,9 +11,15 @@ experimental[] Retrieve a single {kib} saved object by ID. `GET /api/saved_objects//` +`GET /s//api/saved_objects//` + [[saved-objects-api-get-params]] ==== Path parameters +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + + `type`:: (Required, string) Valid options include `visualization`, `dashboard`, `search`, `index-pattern`, `config`, and `timelion-sheet`. @@ -23,10 +29,10 @@ experimental[] Retrieve a single {kib} saved object by ID. [[saved-objects-api-get-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. -[[saved-objects-api-get-example]] +[[saved-objects-api-get-example]] ==== Example Retrieve the index pattern object with the `my-pattern` ID: @@ -50,3 +56,59 @@ The API returns the following: } } -------------------------------------------------- + +The following example retrieves a dashboard object in the `testspace` by id. 
+ +[source,js] +-------------------------------------------------- +GET /s/testspace/api/saved_objects/dashboard/7adfa750-4c81-11e8-b3d7-01146121b73d +-------------------------------------------------- +// KIBANA + +The API returns the following: + +[source,js] +-------------------------------------------------- +{ + "id": "7adfa750-4c81-11e8-b3d7-01146121b73d", + "type": "dashboard", + "updated_at": "2019-07-23T00:11:07.059Z", + "version": "WzQ0LDFd", + "attributes": { + "title": "[Flights] Global Flight Dashboard", + "hits": 0, + "description": "Analyze mock flight data for ES-Air, Logstash Airways, Kibana Airlines and JetBeats", + "panelsJSON": "[{\"panelIndex\":\"1\",\"gridData\":{\"x\":0,\"y\":0,\"w\":32,\"h\":7,\"i\":\"1\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_0\"},{\"panelIndex\":\"3\",\"gridData\":{\"x\":17,\"y\":7,\"w\":23,\"h\":12,\"i\":\"3\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"Average Ticket Price\":\"#0A50A1\",\"Flight Count\":\"#82B5D8\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_1\"},{\"panelIndex\":\"4\",\"gridData\":{\"x\":0,\"y\":85,\"w\":48,\"h\":15,\"i\":\"4\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_2\"},{\"panelIndex\":\"5\",\"gridData\":{\"x\":0,\"y\":7,\"w\":17,\"h\":12,\"i\":\"5\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"ES-Air\":\"#447EBC\",\"JetBeats\":\"#65C5DB\",\"Kibana Airlines\":\"#BA43A9\",\"Logstash Airways\":\"#E5AC0E\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_3\"},{\"panelIndex\":\"6\",\"gridData\":{\"x\":24,\"y\":33,\"w\":24,\"h\":14,\"i\":\"6\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"Carrier Delay\":\"#5195CE\",\"Late Aircraft Delay\":\"#1F78C1\",\"NAS Delay\":\"#70DBED\",\"No Delay\":\"#BADFF4\",\"Security Delay\":\"#052B51\",\"Weather 
Delay\":\"#6ED0E0\"}}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_4\"},{\"panelIndex\":\"7\",\"gridData\":{\"x\":24,\"y\":19,\"w\":24,\"h\":14,\"i\":\"7\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_5\"},{\"panelIndex\":\"10\",\"gridData\":{\"x\":0,\"y\":35,\"w\":24,\"h\":12,\"i\":\"10\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"Count\":\"#1F78C1\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_6\"},{\"panelIndex\":\"13\",\"gridData\":{\"x\":10,\"y\":19,\"w\":14,\"h\":8,\"i\":\"13\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"Count\":\"#1F78C1\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_7\"},{\"panelIndex\":\"14\",\"gridData\":{\"x\":10,\"y\":27,\"w\":14,\"h\":8,\"i\":\"14\"},\"embeddableConfig\":{\"vis\":{\"colors\":{\"Count\":\"#1F78C1\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_8\"},{\"panelIndex\":\"18\",\"gridData\":{\"x\":24,\"y\":70,\"w\":24,\"h\":15,\"i\":\"18\"},\"embeddableConfig\":{\"mapCenter\":[27.421687059550266,15.371002131141724],\"mapZoom\":1},\"version\":\"6.3.0\",\"panelRefName\":\"panel_9\"},{\"panelIndex\":\"21\",\"gridData\":{\"x\":0,\"y\":62,\"w\":48,\"h\":8,\"i\":\"21\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_10\"},{\"panelIndex\":\"22\",\"gridData\":{\"x\":32,\"y\":0,\"w\":16,\"h\":7,\"i\":\"22\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_11\"},{\"panelIndex\":\"23\",\"gridData\":{\"x\":0,\"y\":70,\"w\":24,\"h\":15,\"i\":\"23\"},\"embeddableConfig\":{\"mapCenter\":[42.19556096274418,9.536742995308601e-7],\"mapZoom\":1},\"version\":\"6.3.0\",\"panelRefName\":\"panel_12\"},{\"panelIndex\":\"25\",\"gridData\":{\"x\":0,\"y\":19,\"w\":10,\"h\":8,\"i\":\"25\"},\"embeddableConfig\":{\"vis\":{\"defaultColors\":{\"0 - 50\":\"rgb(247,251,255)\",\"100 - 150\":\"rgb(107,174,214)\",\"150 - 200\":\"rgb(33,113,181)\",\"200 - 250\":\"rgb(8,48,107)\",\"50 - 100\":\"rgb(198,219,239)\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_13\"},{\"panelIndex\":\"27\",\"gridData\":{\"x\":0,\"y\":27,\"w\":10,\"h\":8,\"i\":\"27\"},\"embeddableConfig\":{\"vis\":{\"defaultColors\":{\"0 - 50\":\"rgb(247,251,255)\",\"100 - 150\":\"rgb(107,174,214)\",\"150 - 200\":\"rgb(33,113,181)\",\"200 - 250\":\"rgb(8,48,107)\",\"50 - 100\":\"rgb(198,219,239)\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_14\"},{\"panelIndex\":\"28\",\"gridData\":{\"x\":0,\"y\":47,\"w\":24,\"h\":15,\"i\":\"28\"},\"embeddableConfig\":{\"vis\":{\"defaultColors\":{\"0 -* Connection #0 to host 69c72adb58fa46c69a01afdf4a6cbfd3.us-west1.gcp.cloud.es.io left intact\n 11\":\"rgb(247,251,255)\",\"11 - 22\":\"rgb(208,225,242)\",\"22 - 33\":\"rgb(148,196,223)\",\"33 - 44\":\"rgb(74,152,201)\",\"44 - 55\":\"rgb(23,100,171)\"},\"legendOpen\":false}},\"version\":\"6.3.0\",\"panelRefName\":\"panel_15\"},{\"panelIndex\":\"29\",\"gridData\":{\"x\":40,\"y\":7,\"w\":8,\"h\":6,\"i\":\"29\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_16\"},{\"panelIndex\":\"30\",\"gridData\":{\"x\":40,\"y\":13,\"w\":8,\"h\":6,\"i\":\"30\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_17\"},{\"panelIndex\":\"31\",\"gridData\":{\"x\":24,\"y\":47,\"w\":24,\"h\":15,\"i\":\"31\"},\"embeddableConfig\":{},\"version\":\"6.3.0\",\"panelRefName\":\"panel_18\"}]", + "optionsJSON": "{\"hidePanelTitles\":false,\"useMargins\":true}", + "version": 1, + "timeRestore": true, + "timeTo": "now", + 
"timeFrom": "now-24h", + "refreshInterval": { + "display": "15 minutes", + "pause": false, + "section": 2, + "value": 900000 + }, + "kibanaSavedObjectMeta": { + "searchSourceJSON": "{\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"filter\":[],\"highlightAll\":true,\"version\":true}" + } + }, + "references": [ + { + "name": "panel_0", + "type": "visualization", + "id": "aeb212e0-4c84-11e8-b3d7-01146121b73d" + }, + . . . + { + "name": "panel_18", + "type": "visualization", + "id": "ed78a660-53a0-11e8-acbd-0be0ad9d822b" + } + ], + "migrationVersion": { + "dashboard": "7.0.0" + } +} +-------------------------------------------------- diff --git a/docs/api/saved-objects/import.asciidoc b/docs/api/saved-objects/import.asciidoc index 0331f23284352..5b4c5016be4ed 100644 --- a/docs/api/saved-objects/import.asciidoc +++ b/docs/api/saved-objects/import.asciidoc @@ -11,8 +11,16 @@ experimental[] Create sets of {kib} saved objects from a file created by the exp `POST /api/saved_objects/_import` +`POST /s//api/saved_objects/_import` + +[[saved-objects-api-import-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + [[saved-objects-api-import-query-params]] -==== Query parameter +==== Query parameters `overwrite`:: (Optional, boolean) Overwrites saved objects. @@ -28,19 +36,19 @@ The request body must include the multipart/form-data type. [[saved-objects-api-import-response-body]] ==== Response body -`success`:: - Top-level property that indicates if the import was successful. +`success`:: + Top-level property that indicates if the import was successful. -`successCount`:: +`successCount`:: Indicates the number of successfully imported records. `errors`:: (array) Indicates the import was unsuccessful and specifies the objects that failed to import. - + [[saved-objects-api-import-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. ==== Examples diff --git a/docs/api/saved-objects/resolve_import_errors.asciidoc b/docs/api/saved-objects/resolve_import_errors.asciidoc index b97e5c2a617b0..b64e5deb361b2 100644 --- a/docs/api/saved-objects/resolve_import_errors.asciidoc +++ b/docs/api/saved-objects/resolve_import_errors.asciidoc @@ -4,9 +4,9 @@ Resolve import errors ++++ -experimental[] Resolve errors from the import API. +experimental[] Resolve errors from the import API. -To resolve errors, you can: +To resolve errors, you can: * Retry certain saved objects @@ -19,6 +19,14 @@ To resolve errors, you can: `POST /api/saved_objects/_resolve_import_errors` +`POST /s//api/saved_objects/_resolve_import_errors` + +[[saved-objects-api-resolve-import-errors-path-params]] +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. + [[saved-objects-api-resolve-import-errors-request-body]] ==== Request body @@ -33,19 +41,19 @@ The request body must include the multipart/form-data type. [[saved-objects-api-resolve-import-errors-response-body]] ==== Response body -`success`:: +`success`:: Top-level property that indicates if the errors successfully resolved. - -`successCount`:: + +`successCount`:: Indicates the number of successfully resolved records. `errors`:: (array) Specifies the objects that failed to resolve. - + [[saved-objects-api-resolve-import-errors-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. 
[[saved-objects-api-resolve-import-errors-example]] diff --git a/docs/api/saved-objects/update.asciidoc b/docs/api/saved-objects/update.asciidoc index 5c4bb98d09228..99a9bd4ad15bb 100644 --- a/docs/api/saved-objects/update.asciidoc +++ b/docs/api/saved-objects/update.asciidoc @@ -11,8 +11,13 @@ experimental[] Update the attributes for existing {kib} saved objects. `PUT /api/saved_objects//` +`PUT /s//api/saved_objects//` + [[saved-objects-api-update-path-params]] -==== Path Parameters +==== Path parameters + +`space_id`:: + (Optional, string) An identifier for the space. If `space_id` is not provided in the URL, the default space is used. `type`:: (Required, string) Valid options include `visualization`, `dashboard`, `search`, `index-pattern`, `config`, and `timelion-sheet`. @@ -21,7 +26,7 @@ experimental[] Update the attributes for existing {kib} saved objects. (Required, string) The object ID to update. [[saved-objects-api-update-request-body]] -==== Request Body +==== Request body `attributes`:: (Required, object) The data to persist. @@ -30,11 +35,11 @@ WARNING: When you update, attributes are not validated, which allows you to pass `references`:: (Optional, array) Objects with `name`, `id`, and `type` properties that describe the other saved objects this object references. To refer to the other saved object, use `name` in the attributes, but never the `id`, which automatically updates during migrations or import/export. - + [[saved-objects-api-update-errors-codes]] ==== Response code -`200`:: +`200`:: Indicates a successful call. [[saved-objects-api-update-example]] diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md deleted file mode 100644 index 01f8d4c951465..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [body](./kibana-plugin-public.httperrorresponse.body.md) - -## HttpErrorResponse.body property - -Signature: - -```typescript -body?: HttpBody; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md index 1955bb57c50bf..aa669df796a09 100644 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md +++ b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md @@ -8,15 +8,12 @@ Signature: ```typescript -export interface HttpErrorResponse +export interface HttpErrorResponse extends HttpResponse ``` ## Properties | Property | Type | Description | | --- | --- | --- | -| [body](./kibana-plugin-public.httperrorresponse.body.md) | HttpBody | | | [error](./kibana-plugin-public.httperrorresponse.error.md) | Error | IHttpFetchError | | -| [request](./kibana-plugin-public.httperrorresponse.request.md) | Request | | -| [response](./kibana-plugin-public.httperrorresponse.response.md) | Response | | diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md deleted file mode 100644 index fcb33fc12fbf4..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > 
[HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [request](./kibana-plugin-public.httperrorresponse.request.md) - -## HttpErrorResponse.request property - -Signature: - -```typescript -request?: Request; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md deleted file mode 100644 index e6c7f9675a1d7..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [response](./kibana-plugin-public.httperrorresponse.response.md) - -## HttpErrorResponse.response property - -Signature: - -```typescript -response?: Response; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md index 6f4205f3362fe..ca43ea31f0e2e 100644 --- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md +++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md @@ -9,7 +9,7 @@ Define an interceptor to be executed after a response is received. Signature: ```typescript -response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; +response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; ``` ## Parameters @@ -21,5 +21,5 @@ response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Pro Returns: -`Promise | HttpResponse | void` +`Promise | InterceptedHttpResponse | void` diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md index 1e7cd5e61186e..b8abd50e45461 100644 --- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md +++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md @@ -9,7 +9,7 @@ Define an interceptor to be executed if a response interceptor throws an error o Signature: ```typescript -responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; +responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; ``` ## Parameters @@ -21,5 +21,5 @@ responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptC Returns: -`Promise | HttpResponse | void` +`Promise | InterceptedHttpResponse | void` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md b/docs/development/core/public/kibana-plugin-public.httpresponse.body.md deleted file mode 100644 index c590c9ec49d1b..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [body](./kibana-plugin-public.httpresponse.body.md) - -## HttpResponse.body property - -Signature: - -```typescript -body?: HttpBody; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.md b/docs/development/core/public/kibana-plugin-public.httpresponse.md index 
b2ec48fd4d6b5..e44515cc8a1e0 100644 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.md +++ b/docs/development/core/public/kibana-plugin-public.httpresponse.md @@ -8,14 +8,12 @@ Signature: ```typescript -export interface HttpResponse +export interface HttpResponse extends InterceptedHttpResponse ``` ## Properties | Property | Type | Description | | --- | --- | --- | -| [body](./kibana-plugin-public.httpresponse.body.md) | HttpBody | | -| [request](./kibana-plugin-public.httpresponse.request.md) | Request | | -| [response](./kibana-plugin-public.httpresponse.response.md) | Response | | +| [request](./kibana-plugin-public.httpresponse.request.md) | Readonly<Request> | | diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md index 4cb1ded29152e..84ab1bc7af853 100644 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md +++ b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md @@ -7,5 +7,5 @@ Signature: ```typescript -request?: Request; +request: Readonly; ``` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md b/docs/development/core/public/kibana-plugin-public.httpresponse.response.md deleted file mode 100644 index 44c8eb4295f1c..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [response](./kibana-plugin-public.httpresponse.response.md) - -## HttpResponse.response property - -Signature: - -```typescript -response?: Response; -``` diff --git a/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.md b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.md index f33688affe8d5..0be3b58179209 100644 --- a/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.md +++ b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.md @@ -16,6 +16,8 @@ export interface IHttpFetchError extends Error | Property | Type | Description | | --- | --- | --- | | [body](./kibana-plugin-public.ihttpfetcherror.body.md) | any | | +| [req](./kibana-plugin-public.ihttpfetcherror.req.md) | Request | | | [request](./kibana-plugin-public.ihttpfetcherror.request.md) | Request | | +| [res](./kibana-plugin-public.ihttpfetcherror.res.md) | Response | | | [response](./kibana-plugin-public.ihttpfetcherror.response.md) | Response | | diff --git a/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.req.md b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.req.md new file mode 100644 index 0000000000000..1d20aa5ecd416 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.req.md @@ -0,0 +1,16 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [IHttpFetchError](./kibana-plugin-public.ihttpfetcherror.md) > [req](./kibana-plugin-public.ihttpfetcherror.req.md) + +## IHttpFetchError.req property + +> Warning: This API is now obsolete. +> +> Provided for legacy compatibility. Prefer the `request` property instead. 
+> + +Signature: + +```typescript +readonly req: Request; +``` diff --git a/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.res.md b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.res.md new file mode 100644 index 0000000000000..291b28f6a4250 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.ihttpfetcherror.res.md @@ -0,0 +1,16 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [IHttpFetchError](./kibana-plugin-public.ihttpfetcherror.md) > [res](./kibana-plugin-public.ihttpfetcherror.res.md) + +## IHttpFetchError.res property + +> Warning: This API is now obsolete. +> +> Provided for legacy compatibility. Prefer the `response` property instead. +> + +Signature: + +```typescript +readonly res?: Response; +``` diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md new file mode 100644 index 0000000000000..fc6d34c0b74f2 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [body](./kibana-plugin-public.interceptedhttpresponse.body.md) + +## InterceptedHttpResponse.body property + +Signature: + +```typescript +body?: HttpBody; +``` diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md new file mode 100644 index 0000000000000..c4a7f4d6b2afa --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md @@ -0,0 +1,20 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) + +## InterceptedHttpResponse interface + + +Signature: + +```typescript +export interface InterceptedHttpResponse +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [body](./kibana-plugin-public.interceptedhttpresponse.body.md) | HttpBody | | +| [response](./kibana-plugin-public.interceptedhttpresponse.response.md) | Response | | + diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md new file mode 100644 index 0000000000000..dceb55113ee78 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [response](./kibana-plugin-public.interceptedhttpresponse.response.md) + +## InterceptedHttpResponse.response property + +Signature: + +```typescript +response?: Response; +``` diff --git a/docs/development/core/public/kibana-plugin-public.md b/docs/development/core/public/kibana-plugin-public.md index 3adf68ffb35a8..e787621c3aaf9 100644 --- a/docs/development/core/public/kibana-plugin-public.md +++ b/docs/development/core/public/kibana-plugin-public.md @@ -61,6 +61,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [IContextContainer](./kibana-plugin-public.icontextcontainer.md) | An object that handles registration of context providers and configuring 
handlers with context. | | [IHttpFetchError](./kibana-plugin-public.ihttpfetcherror.md) | | | [IHttpInterceptController](./kibana-plugin-public.ihttpinterceptcontroller.md) | Used to halt a request Promise chain in a [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md). | +| [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) | | | [LegacyCoreSetup](./kibana-plugin-public.legacycoresetup.md) | Setup interface exposed to the legacy platform via the ui/new_platform module. | | [LegacyCoreStart](./kibana-plugin-public.legacycorestart.md) | Start interface exposed to the legacy platform via the ui/new_platform module. | | [LegacyNavLink](./kibana-plugin-public.legacynavlink.md) | | @@ -79,6 +80,8 @@ The plugin integrates with the core system via lifecycle events: `setup` | [SavedObjectsBatchResponse](./kibana-plugin-public.savedobjectsbatchresponse.md) | | | [SavedObjectsBulkCreateObject](./kibana-plugin-public.savedobjectsbulkcreateobject.md) | | | [SavedObjectsBulkCreateOptions](./kibana-plugin-public.savedobjectsbulkcreateoptions.md) | | +| [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) | | +| [SavedObjectsBulkUpdateOptions](./kibana-plugin-public.savedobjectsbulkupdateoptions.md) | | | [SavedObjectsCreateOptions](./kibana-plugin-public.savedobjectscreateoptions.md) | | | [SavedObjectsFindOptions](./kibana-plugin-public.savedobjectsfindoptions.md) | | | [SavedObjectsFindResponsePublic](./kibana-plugin-public.savedobjectsfindresponsepublic.md) | Return type of the Saved Objects find() method.\*Note\*: this type is different between the Public and Server Saved Objects clients. | diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.attributes.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.attributes.md new file mode 100644 index 0000000000000..235c896532beb --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.attributes.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) > [attributes](./kibana-plugin-public.savedobjectsbulkupdateobject.attributes.md) + +## SavedObjectsBulkUpdateObject.attributes property + +Signature: + +```typescript +attributes: T; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.id.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.id.md new file mode 100644 index 0000000000000..8fbece1de7aa1 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.id.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) > [id](./kibana-plugin-public.savedobjectsbulkupdateobject.id.md) + +## SavedObjectsBulkUpdateObject.id property + +Signature: + +```typescript +id: string; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.md new file mode 100644 index 0000000000000..91688c01df34c --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.md @@ -0,0 +1,23 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > 
[SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) + +## SavedObjectsBulkUpdateObject interface + + +Signature: + +```typescript +export interface SavedObjectsBulkUpdateObject +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [attributes](./kibana-plugin-public.savedobjectsbulkupdateobject.attributes.md) | T | | +| [id](./kibana-plugin-public.savedobjectsbulkupdateobject.id.md) | string | | +| [references](./kibana-plugin-public.savedobjectsbulkupdateobject.references.md) | SavedObjectReference[] | | +| [type](./kibana-plugin-public.savedobjectsbulkupdateobject.type.md) | string | | +| [version](./kibana-plugin-public.savedobjectsbulkupdateobject.version.md) | string | | + diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.references.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.references.md new file mode 100644 index 0000000000000..3949eb809c3a0 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.references.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) > [references](./kibana-plugin-public.savedobjectsbulkupdateobject.references.md) + +## SavedObjectsBulkUpdateObject.references property + +Signature: + +```typescript +references?: SavedObjectReference[]; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.type.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.type.md new file mode 100644 index 0000000000000..b3bd0f7eb2580 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.type.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) > [type](./kibana-plugin-public.savedobjectsbulkupdateobject.type.md) + +## SavedObjectsBulkUpdateObject.type property + +Signature: + +```typescript +type: string; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.version.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.version.md new file mode 100644 index 0000000000000..7608bc7aff909 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateobject.version.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-public.savedobjectsbulkupdateobject.md) > [version](./kibana-plugin-public.savedobjectsbulkupdateobject.version.md) + +## SavedObjectsBulkUpdateObject.version property + +Signature: + +```typescript +version?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.md new file mode 100644 index 0000000000000..8a2ecefb73283 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateOptions](./kibana-plugin-public.savedobjectsbulkupdateoptions.md) + +## SavedObjectsBulkUpdateOptions interface + + +Signature: + +```typescript +export 
interface SavedObjectsBulkUpdateOptions +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [namespace](./kibana-plugin-public.savedobjectsbulkupdateoptions.namespace.md) | string | | + diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.namespace.md b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.namespace.md new file mode 100644 index 0000000000000..0079e56684b75 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsbulkupdateoptions.namespace.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsBulkUpdateOptions](./kibana-plugin-public.savedobjectsbulkupdateoptions.md) > [namespace](./kibana-plugin-public.savedobjectsbulkupdateoptions.namespace.md) + +## SavedObjectsBulkUpdateOptions.namespace property + +Signature: + +```typescript +namespace?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.bulkupdate.md b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.bulkupdate.md new file mode 100644 index 0000000000000..f39638575beb1 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.bulkupdate.md @@ -0,0 +1,26 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [SavedObjectsClient](./kibana-plugin-public.savedobjectsclient.md) > [bulkUpdate](./kibana-plugin-public.savedobjectsclient.bulkupdate.md) + +## SavedObjectsClient.bulkUpdate() method + +Update multiple documents at once + +Signature: + +```typescript +bulkUpdate(objects?: SavedObjectsBulkUpdateObject[]): Promise>; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| objects | SavedObjectsBulkUpdateObject[] | | + +Returns: + +`Promise>` + +The result of the update operation containing both failed and updated saved objects. 
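As a usage sketch only — the object types, IDs, and titles below are placeholders, and `savedObjectsClient` is assumed to be an already-constructed instance of this class available to the caller — a batched update looks like this:

```typescript
// Hypothetical example: update the titles of two saved objects in one round trip.
// `savedObjectsClient` is an assumed, already-constructed SavedObjectsClient instance.
const response = await savedObjectsClient.bulkUpdate([
  {
    type: 'dashboard',
    id: 'be3733a0-9efe-11e7-acb3-3dab96693fab',
    attributes: { title: 'Renamed dashboard' },
  },
  {
    type: 'index-pattern',
    id: 'my-pattern',
    attributes: { title: 'logs-*' },
  },
]);
// Per the description above, the result contains both updated saved objects and
// error entries for any objects that failed to update.
```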
+ diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md index 1d0d942a24c0a..50451b813a61c 100644 --- a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md @@ -27,6 +27,7 @@ export declare class SavedObjectsClient | Method | Modifiers | Description | | --- | --- | --- | +| [bulkUpdate(objects)](./kibana-plugin-public.savedobjectsclient.bulkupdate.md) | | Update multiple documents at once | | [update(type, id, attributes, { version, migrationVersion, references })](./kibana-plugin-public.savedobjectsclient.update.md) | | Updates an object | ## Remarks diff --git a/docs/development/core/server/kibana-plugin-server.md b/docs/development/core/server/kibana-plugin-server.md index 7b302838995c1..2f81afacf4bb4 100644 --- a/docs/development/core/server/kibana-plugin-server.md +++ b/docs/development/core/server/kibana-plugin-server.md @@ -89,6 +89,8 @@ The plugin integrates with the core system via lifecycle events: `setup` | [SavedObjectsBulkCreateObject](./kibana-plugin-server.savedobjectsbulkcreateobject.md) | | | [SavedObjectsBulkGetObject](./kibana-plugin-server.savedobjectsbulkgetobject.md) | | | [SavedObjectsBulkResponse](./kibana-plugin-server.savedobjectsbulkresponse.md) | | +| [SavedObjectsBulkUpdateObject](./kibana-plugin-server.savedobjectsbulkupdateobject.md) | | +| [SavedObjectsBulkUpdateResponse](./kibana-plugin-server.savedobjectsbulkupdateresponse.md) | | | [SavedObjectsClientProviderOptions](./kibana-plugin-server.savedobjectsclientprovideroptions.md) | Options to control the creation of the Saved Objects Client. | | [SavedObjectsClientWrapperOptions](./kibana-plugin-server.savedobjectsclientwrapperoptions.md) | Options passed to each SavedObjectsClientWrapperFactory to aid in creating the wrapper instance. | | [SavedObjectsCreateOptions](./kibana-plugin-server.savedobjectscreateoptions.md) | | diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.attributes.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.attributes.md new file mode 100644 index 0000000000000..3de73d133d7a7 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.attributes.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-server.savedobjectsbulkupdateobject.md) > [attributes](./kibana-plugin-server.savedobjectsbulkupdateobject.attributes.md) + +## SavedObjectsBulkUpdateObject.attributes property + +The data for a Saved Object is stored as an object in the `attributes` property. 
+ +Signature: + +```typescript +attributes: Partial<T>; +``` diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.id.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.id.md new file mode 100644 index 0000000000000..88bc9f306b26c --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.id.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-server.savedobjectsbulkupdateobject.md) > [id](./kibana-plugin-server.savedobjectsbulkupdateobject.id.md) + +## SavedObjectsBulkUpdateObject.id property + +The ID of this Saved Object, guaranteed to be unique for all objects of the same `type` + +Signature: + +```typescript +id: string; +``` diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.md new file mode 100644 index 0000000000000..b84bbe0a17344 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.md @@ -0,0 +1,21 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-server.savedobjectsbulkupdateobject.md) + +## SavedObjectsBulkUpdateObject interface + + +Signature: + +```typescript +export interface SavedObjectsBulkUpdateObject extends Pick +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [attributes](./kibana-plugin-server.savedobjectsbulkupdateobject.attributes.md) | Partial<T> | The data for a Saved Object is stored as an object in the attributes property. | +| [id](./kibana-plugin-server.savedobjectsbulkupdateobject.id.md) | string | The ID of this Saved Object, guaranteed to be unique for all objects of the same type | +| [type](./kibana-plugin-server.savedobjectsbulkupdateobject.type.md) | string | The type of this Saved Object. Each plugin can define its own custom Saved Object types. | + diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.type.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.type.md new file mode 100644 index 0000000000000..d2d46b24ea8be --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateobject.type.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateObject](./kibana-plugin-server.savedobjectsbulkupdateobject.md) > [type](./kibana-plugin-server.savedobjectsbulkupdateobject.type.md) + +## SavedObjectsBulkUpdateObject.type property + +The type of this Saved Object. Each plugin can define its own custom Saved Object types. 
+ +Signature: + +```typescript +type: string; +``` diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.md new file mode 100644 index 0000000000000..03707bd14a3eb --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateResponse](./kibana-plugin-server.savedobjectsbulkupdateresponse.md) + +## SavedObjectsBulkUpdateResponse interface + + +Signature: + +```typescript +export interface SavedObjectsBulkUpdateResponse +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [saved\_objects](./kibana-plugin-server.savedobjectsbulkupdateresponse.saved_objects.md) | Array<SavedObjectsUpdateResponse<T>> | | + diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.saved_objects.md b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.saved_objects.md new file mode 100644 index 0000000000000..0ca54ca176292 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsbulkupdateresponse.saved_objects.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsBulkUpdateResponse](./kibana-plugin-server.savedobjectsbulkupdateresponse.md) > [saved\_objects](./kibana-plugin-server.savedobjectsbulkupdateresponse.saved_objects.md) + +## SavedObjectsBulkUpdateResponse.saved\_objects property + +Signature: + +```typescript +saved_objects: Array>; +``` diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsclient.bulkupdate.md b/docs/development/core/server/kibana-plugin-server.savedobjectsclient.bulkupdate.md new file mode 100644 index 0000000000000..107e71959f706 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsclient.bulkupdate.md @@ -0,0 +1,25 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [SavedObjectsClient](./kibana-plugin-server.savedobjectsclient.md) > [bulkUpdate](./kibana-plugin-server.savedobjectsclient.bulkupdate.md) + +## SavedObjectsClient.bulkUpdate() method + +Bulk Updates multiple SavedObject at once + +Signature: + +```typescript +bulkUpdate(objects: Array>, options?: SavedObjectsBaseOptions): Promise>; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| objects | Array<SavedObjectsBulkUpdateObject<T>> | | +| options | SavedObjectsBaseOptions | | + +Returns: + +`Promise>` + diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsclient.md b/docs/development/core/server/kibana-plugin-server.savedobjectsclient.md index 0081c729fe10a..cc00934a1e1fd 100644 --- a/docs/development/core/server/kibana-plugin-server.savedobjectsclient.md +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsclient.md @@ -30,6 +30,7 @@ export declare class SavedObjectsClient | --- | --- | --- | | [bulkCreate(objects, options)](./kibana-plugin-server.savedobjectsclient.bulkcreate.md) | | Persists multiple documents batched together as a single request | | [bulkGet(objects, options)](./kibana-plugin-server.savedobjectsclient.bulkget.md) | | Returns an array of objects by id | +| [bulkUpdate(objects, options)](./kibana-plugin-server.savedobjectsclient.bulkupdate.md) | | Bulk Updates multiple SavedObject at once | | 
[create(type, attributes, options)](./kibana-plugin-server.savedobjectsclient.create.md) | | Persists a SavedObject | | [delete(type, id, options)](./kibana-plugin-server.savedobjectsclient.delete.md) | | Deletes a SavedObject | | [find(options)](./kibana-plugin-server.savedobjectsclient.find.md) | | Find all SavedObjects matching the search query | diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.md b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.md index 577fd632be9cb..7fcd362e937a0 100644 --- a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.md +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.md @@ -15,6 +15,6 @@ export interface SavedObjectsUpdateOptions extends SavedObjectsBaseOptions | Property | Type | Description | | --- | --- | --- | -| [references](./kibana-plugin-server.savedobjectsupdateoptions.references.md) | SavedObjectReference[] | | -| [version](./kibana-plugin-server.savedobjectsupdateoptions.version.md) | string | Ensures version matches that of persisted object | +| [references](./kibana-plugin-server.savedobjectsupdateoptions.references.md) | SavedObjectReference[] | A reference to another saved object. | +| [version](./kibana-plugin-server.savedobjectsupdateoptions.version.md) | string | An opaque version number which changes on each successful write operation. Can be used for implementing optimistic concurrency control. | diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.references.md b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.references.md index 500be57041756..76eca68dba37f 100644 --- a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.references.md +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.references.md @@ -4,6 +4,8 @@ ## SavedObjectsUpdateOptions.references property +A reference to another saved object. + Signature: ```typescript diff --git a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.version.md b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.version.md index 8461181222238..6e399b343556b 100644 --- a/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.version.md +++ b/docs/development/core/server/kibana-plugin-server.savedobjectsupdateoptions.version.md @@ -4,7 +4,7 @@ ## SavedObjectsUpdateOptions.version property -Ensures version matches that of persisted object +An opaque version number which changes on each successful write operation. Can be used for implementing optimistic concurrency control. 
Signature: diff --git a/docs/limitations.asciidoc b/docs/limitations.asciidoc index 62dd2c7fc4972..0b26a3cdcf71a 100644 --- a/docs/limitations.asciidoc +++ b/docs/limitations.asciidoc @@ -11,9 +11,9 @@ These {stack} features also have limitations that affect {kib}: -* {stack-ov}/watcher-limitations.html[Alerting] +* {ref}/watcher-limitations.html[Alerting] * {stack-ov}/ml-limitations.html[Machine learning] -* {stack-ov}/security-limitations.html[Security] +* {ref}/security-limitations.html[Security] -- diff --git a/docs/management/dashboard_only_mode/index.asciidoc b/docs/management/dashboard_only_mode/index.asciidoc index 3040c73b468e1..97ac4392827dd 100644 --- a/docs/management/dashboard_only_mode/index.asciidoc +++ b/docs/management/dashboard_only_mode/index.asciidoc @@ -58,7 +58,7 @@ does not provide access to data indices. You must also assign the user a role that grants `read` access to each index you are using. Use *Management > Security > Roles* to create or edit a role and assign index privileges. -For information on roles and privileges, see {stack-ov}/authorization.html[User authorization]. +For information on roles and privileges, see {ref}/authorization.html[User authorization]. [role="screenshot"] image:management/dashboard_only_mode/images/custom_dashboard_mode_role.png["Dashboard Only mode has no editing controls"] diff --git a/docs/management/managing-licenses.asciidoc b/docs/management/managing-licenses.asciidoc index 221cf9a0408ac..bbe4b3b68e03b 100644 --- a/docs/management/managing-licenses.asciidoc +++ b/docs/management/managing-licenses.asciidoc @@ -25,4 +25,4 @@ license, extend the trial, or purchase a subscription. TIP: If {security-features} are enabled, before you revert to a Basic license or install a Gold or Platinum license, you must configure Transport Layer Security (TLS) in {es}. -See {stack-ov}/encrypting-communications.html[Encrypting communications]. \ No newline at end of file +See {ref}/encrypting-communications.html[Encrypting communications]. \ No newline at end of file diff --git a/docs/management/managing-remote-clusters.asciidoc b/docs/management/managing-remote-clusters.asciidoc index 8717476ac5d2e..a776cdf0334cb 100644 --- a/docs/management/managing-remote-clusters.asciidoc +++ b/docs/management/managing-remote-clusters.asciidoc @@ -10,7 +10,7 @@ Before using these features, you should be familiar with the following concepts: * {ref}/xpack-ccr.html[{ccr-cap}] * {ref}/modules-cross-cluster-search.html[{ccs-cap}] -* {stack-ov}/cross-cluster-configuring.html[Cross-cluster security requirements] +* {ref}/cross-cluster-configuring.html[Cross-cluster security requirements] [float] [[managing-remote-clusters]] diff --git a/docs/management/watcher-ui/index.asciidoc b/docs/management/watcher-ui/index.asciidoc index 767f9029907fd..79db96d759aa5 100644 --- a/docs/management/watcher-ui/index.asciidoc +++ b/docs/management/watcher-ui/index.asciidoc @@ -31,7 +31,7 @@ watch—input, schedule, condition, and actions. === Watcher security If the {es} {security-features} are enabled, you must have the -{stack-ov}/security-privileges.html[`manage_watcher` or `monitor_watcher`] +{ref}/security-privileges.html[`manage_watcher` or `monitor_watcher`] cluster privileges to use Watcher in {kib}. 
Alternately, you can have the built-in `kibana_user` role diff --git a/docs/settings/monitoring-settings.asciidoc b/docs/settings/monitoring-settings.asciidoc index f08ae8e942f4a..68dd9a8b3cefb 100644 --- a/docs/settings/monitoring-settings.asciidoc +++ b/docs/settings/monitoring-settings.asciidoc @@ -25,7 +25,7 @@ from Logstash, you configure in `logstash.yml`. For more information, see -{stack-ov}/xpack-monitoring.html[Monitoring the Elastic Stack]. +{ref}/monitor-elasticsearch-cluster.html[Monitor a cluster]. [float] [[monitoring-general-settings]] diff --git a/docs/settings/ssl-settings.asciidoc b/docs/settings/ssl-settings.asciidoc index 4344625833947..5341d3543e7c6 100644 --- a/docs/settings/ssl-settings.asciidoc +++ b/docs/settings/ssl-settings.asciidoc @@ -2,7 +2,7 @@ === {component} TLS/SSL settings You can configure the following TLS/SSL settings. If the settings are not configured, the default values are used. See -{stack-ov}/security-settings.html[Default TLS/SSL Settings]. +{ref}/security-settings.html[Default TLS/SSL Settings]. ifdef::server[] +{ssl-prefix}.ssl.enabled+:: @@ -45,7 +45,7 @@ Java Cryptography Architecture documentation]. Defaults to the value of The following settings are used to specify a private key, certificate, and the trusted certificates that should be used when communicating over an SSL/TLS connection. If none of the settings below are specified, the default values are used. -See {stack-ov}/security-settings.html[Default TLS/SSL settings]. +See {ref}/security-settings.html[Default TLS/SSL settings]. ifdef::server[] A private key and certificate must be configured. @@ -55,7 +55,7 @@ A private key and certificate are optional and would be used if the server requi authentication. endif::server[] If none of the settings below are specified, the defaults values are used. -See {stack-ov}/security-settings.html[Default TLS/SSL settings]. +See {ref}/security-settings.html[Default TLS/SSL settings]. [float] ===== PEM encoded files diff --git a/docs/setup/production.asciidoc b/docs/setup/production.asciidoc index d3d24e8c41da5..67afe0896a0dd 100644 --- a/docs/setup/production.asciidoc +++ b/docs/setup/production.asciidoc @@ -23,8 +23,8 @@ and an Elasticsearch client node on the same machine. For more information, see [[configuring-kibana-shield]] === Using {stack} {security-features} -You can use {stack-ov}/elasticsearch-security.html[{stack} {security-features}] -to control what {es} data users can access through Kibana. +You can use {stack} {security-features} to control what {es} data users can +access through Kibana. When {security-features} are enabled, Kibana users have to log in. They need to have a role granting <> as well as access diff --git a/docs/user/monitoring/configuring-monitoring.asciidoc b/docs/user/monitoring/configuring-monitoring.asciidoc index b4a7273136f37..de9e99117fc99 100644 --- a/docs/user/monitoring/configuring-monitoring.asciidoc +++ b/docs/user/monitoring/configuring-monitoring.asciidoc @@ -15,7 +15,7 @@ You can also use {kib} to <>. To learn about monitoring in general, see -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +{ref}/monitor-elasticsearch-cluster.html[Monitor a cluster]. 
include::monitoring-kibana.asciidoc[] include::monitoring-metricbeat.asciidoc[] diff --git a/docs/user/monitoring/elasticsearch-details.asciidoc b/docs/user/monitoring/elasticsearch-details.asciidoc index d24f14694565b..2990e965be03c 100644 --- a/docs/user/monitoring/elasticsearch-details.asciidoc +++ b/docs/user/monitoring/elasticsearch-details.asciidoc @@ -27,7 +27,7 @@ highlighted in yellow or red. TIP: Conditions that require your attention are listed at the top of the Clusters page. You can also set up watches to alert you when the status of your cluster changes. To learn how, see -{stack-ov}/watch-cluster-status.html[Watch Your Cluster Health]. +{ref}/watch-cluster-status.html[Watching the status of an {es} cluster]. The panel at the top shows the current cluster statistics, the charts show the search and indexing performance over time, and the table at the bottom shows @@ -145,7 +145,7 @@ You can also see advanced information, which contains the results from the [role="screenshot"] image::user/monitoring/images/monitoring-ccr-shard.png["Cross-cluster replication shard details",link="images/monitoring-ccr-shard.png"] -For more information, see {stack-ov}/xpack-ccr.html[Cross-cluster replication]. +For more information, see {ref}/xpack-ccr.html[{ccr-cap}]. [float] [[logs-monitor-page]] diff --git a/docs/user/monitoring/index.asciidoc b/docs/user/monitoring/index.asciidoc index 1e8bd7fd98de9..edc572a56434e 100644 --- a/docs/user/monitoring/index.asciidoc +++ b/docs/user/monitoring/index.asciidoc @@ -21,7 +21,7 @@ NOTE: Watcher must be enabled to view cluster alerts. If you have a Basic license, Top Cluster Alerts are not displayed. For more information, see <> and -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +{ref}/monitor-elasticsearch-cluster.html[Monitor a cluster]. -- diff --git a/docs/user/monitoring/monitoring-kibana.asciidoc b/docs/user/monitoring/monitoring-kibana.asciidoc index ade964e0e9a46..d7af0d5c420a1 100644 --- a/docs/user/monitoring/monitoring-kibana.asciidoc +++ b/docs/user/monitoring/monitoring-kibana.asciidoc @@ -13,7 +13,7 @@ which ultimately routes them to the monitoring cluster. For an alternative method, see <>. To learn about monitoring in general, see -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +{ref}/monitor-elasticsearch-cluster.html[Monitor a cluster]. . Set the `xpack.monitoring.collection.enabled` setting to `true` on each node in the production cluster. By default, it is is disabled (`false`). diff --git a/docs/user/monitoring/monitoring-metricbeat.asciidoc b/docs/user/monitoring/monitoring-metricbeat.asciidoc index 94799bd147e3b..f03a2ce1525a4 100644 --- a/docs/user/monitoring/monitoring-metricbeat.asciidoc +++ b/docs/user/monitoring/monitoring-metricbeat.asciidoc @@ -13,7 +13,7 @@ production cluster as described in <>. image::user/monitoring/images/metricbeat.png[Example monitoring architecture] To learn about monitoring in general, see -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +{ref}/monitor-elasticsearch-cluster.html[Monitor a cluster]. //NOTE: The tagged regions are re-used in the Stack Overview. @@ -134,9 +134,9 @@ If the Elastic {security-features} are enabled, you must also provide a user ID and password so that {metricbeat} can collect metrics successfully: .. Create a user on the production cluster that has the -`remote_monitoring_collector` {stack-ov}/built-in-roles.html[built-in role]. +`remote_monitoring_collector` {ref}/built-in-roles.html[built-in role]. 
Alternatively, use the `remote_monitoring_user` -{stack-ov}/built-in-users.html[built-in user]. +{ref}/built-in-users.html[built-in user]. .. Add the `username` and `password` settings to the {kib} module configuration file. @@ -197,9 +197,9 @@ must provide a valid user ID and password so that {metricbeat} can send metrics successfully: .. Create a user on the monitoring cluster that has the -`remote_monitoring_agent` {stack-ov}/built-in-roles.html[built-in role]. +`remote_monitoring_agent` {ref}/built-in-roles.html[built-in role]. Alternatively, use the `remote_monitoring_user` -{stack-ov}/built-in-users.html[built-in user]. +{ref}/built-in-users.html[built-in user]. .. Add the `username` and `password` settings to the {es} output information in the {metricbeat} configuration file. diff --git a/docs/user/monitoring/viewing-metrics.asciidoc b/docs/user/monitoring/viewing-metrics.asciidoc index b4718d07491e1..61bcb9a49c901 100644 --- a/docs/user/monitoring/viewing-metrics.asciidoc +++ b/docs/user/monitoring/viewing-metrics.asciidoc @@ -41,7 +41,7 @@ default value, in the `kibana.yml` file. For more information, see must provide a user ID and password so {kib} can retrieve the data. .. Create a user that has the `monitoring_user` -{stack-ov}/built-in-roles.html[built-in role] on the monitoring cluster. +{ref}/built-in-roles.html[built-in role] on the monitoring cluster. .. Add the `xpack.monitoring.elasticsearch.username` and `xpack.monitoring.elasticsearch.password` settings in the `kibana.yml` file. @@ -64,7 +64,7 @@ remote monitoring cluster, you must use credentials that are valid on both the -- .. Create users that have the `monitoring_user` and `kibana_user` -{stack-ov}/built-in-roles.html[built-in roles]. +{ref}/built-in-roles.html[built-in roles]. . Open {kib} in your web browser. + diff --git a/docs/user/reporting/gs-index.asciidoc b/docs/user/reporting/gs-index.asciidoc index 61e1acce0c87f..87918ee76340e 100644 --- a/docs/user/reporting/gs-index.asciidoc +++ b/docs/user/reporting/gs-index.asciidoc @@ -16,7 +16,7 @@ The following Reporting button appears in the {kib} toolbar: image:images/reporting.jpg["Reporting",link="reporting.jpg"] -You can also {stack-ov}/automating-report-generation.html[generate reports automatically]. +You can also <>. IMPORTANT: Reports are stored in the `.reporting-*` indices. Any user with access to these indices has access to every report generated by all users. diff --git a/docs/user/reporting/watch-example.asciidoc b/docs/user/reporting/watch-example.asciidoc index 6e147ccdf9ee6..4f5f011d41074 100644 --- a/docs/user/reporting/watch-example.asciidoc +++ b/docs/user/reporting/watch-example.asciidoc @@ -59,4 +59,4 @@ report from the Kibana UI. NOTE: Reporting is integrated with Watcher only as an email attachment type. For more information about configuring watches, see -{stack-ov}/how-watcher-works.html[How Watcher Works]. +{ref}/how-watcher-works.html[How Watcher works]. diff --git a/docs/user/security/audit-logging.asciidoc b/docs/user/security/audit-logging.asciidoc index bca2df5d06572..f72ae0dcf9c93 100644 --- a/docs/user/security/audit-logging.asciidoc +++ b/docs/user/security/audit-logging.asciidoc @@ -12,7 +12,7 @@ audit logging to get a holistic view of all security related events. {kib} defers to {es}'s security model for authentication, data index authorization, and features that are driven by cluster-wide privileges. For more information on enabling audit logging in {es}, see -{stack-ov}/auditing.html[Auditing Security Events]. 
+{ref}/auditing.html[Auditing security events]. [IMPORTANT] ============================================================================ diff --git a/docs/user/security/authorization/index.asciidoc b/docs/user/security/authorization/index.asciidoc index 05182d9dc687b..803d22a91a309 100644 --- a/docs/user/security/authorization/index.asciidoc +++ b/docs/user/security/authorization/index.asciidoc @@ -2,7 +2,7 @@ [[xpack-security-authorization]] === Granting access to {kib} -The Elastic Stack comes with the `kibana_user` {stack-ov}/built-in-roles.html[built-in role], which you can use to grant access to all Kibana features in all spaces. To grant users access to a subset of spaces or features, you can create a custom role that grants the desired Kibana privileges. +The Elastic Stack comes with the `kibana_user` {ref}/built-in-roles.html[built-in role], which you can use to grant access to all Kibana features in all spaces. To grant users access to a subset of spaces or features, you can create a custom role that grants the desired Kibana privileges. When you assign a user multiple roles, the user receives a union of the roles’ privileges. Therefore, assigning the `kibana_user` role in addition to a custom role that grants Kibana privileges is ineffective because `kibana_user` has access to all the features in all spaces. diff --git a/docs/user/security/index.asciidoc b/docs/user/security/index.asciidoc index 44ffb39a90618..7b7e38d610843 100644 --- a/docs/user/security/index.asciidoc +++ b/docs/user/security/index.asciidoc @@ -7,7 +7,7 @@ security, you can password-protect your data as well as implement more advanced security measures such as encrypting communications, role-based access control, IP filtering, and auditing. For more information, see -{stack-ov}/elasticsearch-security.html[Securing {es} and {kib}] and +{ref}/secure-cluster.html[Secure a cluster] and <>. [float] @@ -16,7 +16,7 @@ auditing. For more information, see You can create and manage users on the *Management -> Security -> Users* page. You can also change their passwords and roles. For more information about authentication and built-in users, see -{stack-ov}/setting-up-authentication.html[Setting up user authentication]. +{ref}/setting-up-authentication.html[Setting up user authentication]. [float] === Roles @@ -25,7 +25,7 @@ You can manage roles on the *Management -> Security -> Roles* page, or use the <>. For more information on configuring roles for {kib}, see <>. For a more holistic overview of configuring roles for the entire stack, -see {stack-ov}/authorization.html[Configuring role-based access control]. +see {ref}/authorization.html[User authorization]. [NOTE] ============================================================================ diff --git a/docs/user/security/securing-kibana.asciidoc b/docs/user/security/securing-kibana.asciidoc index 51c796e7fe9e2..fa11d5925bdbe 100644 --- a/docs/user/security/securing-kibana.asciidoc +++ b/docs/user/security/securing-kibana.asciidoc @@ -40,7 +40,7 @@ APIs and the `.kibana` index. The server does _not_ need access to user indices. The password for the built-in `kibana` user is typically set as part of the {security} configuration process on {es}. For more information, see -{stack-ov}/built-in-users.html[Built-in users]. +{ref}/built-in-users.html[Built-in users]. -- . 
Set the `xpack.security.encryptionKey` property in the `kibana.yml` diff --git a/package.json b/package.json index 85ca5e2909b24..e61d757195438 100644 --- a/package.json +++ b/package.json @@ -106,13 +106,14 @@ "dependencies": { "@babel/core": "^7.5.5", "@babel/register": "^7.5.5", - "@elastic/charts": "^13.5.1", + "@elastic/charts": "^13.5.4", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "1.0.5", "@elastic/eui": "14.5.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "8.1.1-kibana2", "@elastic/numeral": "2.3.3", + "@elastic/request-crypto": "^1.0.2", "@elastic/ui-ace": "0.2.3", "@hapi/wreck": "^15.0.1", "@kbn/analytics": "1.0.0", @@ -390,7 +391,7 @@ "exit-hook": "^2.2.0", "faker": "1.1.0", "fetch-mock": "^7.3.9", - "geckodriver": "^1.18.0", + "geckodriver": "^1.19.0", "getopts": "^2.2.4", "grunt": "1.0.4", "grunt-available-tasks": "^0.6.3", diff --git a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js index 59e5f4d6faf8a..676992e4dddc8 100644 --- a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js +++ b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js @@ -55,6 +55,32 @@ describe('build query', function () { expect(result.filter).to.eql(expectedESQueries); }); + it('should remove disabled filters', function () { + const filters = [ + { + match_all: {}, + meta: { type: 'match_all', negate: true, disabled: true }, + }, + ]; + + const expectedESQueries = []; + + const result = buildQueryFromFilters(filters); + + expect(result.must_not).to.eql(expectedESQueries); + }); + + it('should remove falsy filters', function () { + const filters = [null, undefined]; + + const expectedESQueries = []; + + const result = buildQueryFromFilters(filters); + + expect(result.must_not).to.eql(expectedESQueries); + expect(result.must).to.eql(expectedESQueries); + }); + it('should place negated filters in the must_not clause', function () { const filters = [ { diff --git a/packages/kbn-es-query/src/es_query/from_filters.js b/packages/kbn-es-query/src/es_query/from_filters.js index b8193b7469a20..10f9cf82fc972 100644 --- a/packages/kbn-es-query/src/es_query/from_filters.js +++ b/packages/kbn-es-query/src/es_query/from_filters.js @@ -60,6 +60,7 @@ const cleanFilter = function (filter) { }; export function buildQueryFromFilters(filters = [], indexPattern, ignoreFilterIfFieldNotInIndex) { + filters = filters.filter(filter => filter && !_.get(filter, ['meta', 'disabled'])); return { must: [], filter: filters diff --git a/packages/kbn-spec-to-console/lib/convert/params.js b/packages/kbn-spec-to-console/lib/convert/params.js index f195496f11301..b001044caba1e 100644 --- a/packages/kbn-spec-to-console/lib/convert/params.js +++ b/packages/kbn-spec-to-console/lib/convert/params.js @@ -34,7 +34,12 @@ module.exports = params => { result[param] = 0.0; break; case 'enum': - result[param] = options; + // This is to clean up entries like: "d (Days)". We only want the "d" part. 
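+        // Splitting on the first space keeps only the unit token, so the generated
+        // Console autocomplete suggests "d" instead of the full "d (Days)" label.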
+ if (param === 'time') { + result[param] = options.map(option => option.split(' ')[0]); + } else { + result[param] = options; + } break; case 'boolean': result[param] = '__flag__'; diff --git a/src/core/public/chrome/ui/header/header.tsx b/src/core/public/chrome/ui/header/header.tsx index f24b0ed1681aa..4e73f49527856 100644 --- a/src/core/public/chrome/ui/header/header.tsx +++ b/src/core/public/chrome/ui/header/header.tsx @@ -406,12 +406,26 @@ class HeaderUI extends Component { data-test-subj="navDrawer" isLocked={isLocked} onIsLockedUpdate={onIsLockedUpdate} + aria-label={i18n.translate('core.ui.primaryNav.screenReaderLabel', { + defaultMessage: 'Primary', + })} > - + + ); diff --git a/src/core/public/http/http_fetch_error.ts b/src/core/public/http/http_fetch_error.ts index 5292afdd56d77..2156df5798974 100644 --- a/src/core/public/http/http_fetch_error.ts +++ b/src/core/public/http/http_fetch_error.ts @@ -21,6 +21,9 @@ import { IHttpFetchError } from './types'; /** @internal */ export class HttpFetchError extends Error implements IHttpFetchError { + public readonly req: Request; + public readonly res?: Response; + constructor( message: string, public readonly request: Request, @@ -28,6 +31,8 @@ export class HttpFetchError extends Error implements IHttpFetchError { public readonly body?: any ) { super(message); + this.req = request; + this.res = response; // captureStackTrace is only available in the V8 engine, so any browser using // a different JS engine won't have access to this method. diff --git a/src/core/public/http/http_service.test.ts b/src/core/public/http/http_service.test.ts index dddd2cc5ec36f..13906b91ed8df 100644 --- a/src/core/public/http/http_service.test.ts +++ b/src/core/public/http/http_service.test.ts @@ -24,6 +24,7 @@ import fetchMock from 'fetch-mock/es5/client'; import { readFileSync } from 'fs'; import { join } from 'path'; import { setup, SetupTap } from '../../../test_utils/public/http_test_setup'; +import { HttpResponse } from './types'; function delay(duration: number) { return new Promise(r => setTimeout(r, duration)); @@ -394,12 +395,12 @@ describe('interception', () => { const unusedSpy = jest.fn(); - http.intercept({ response: unusedSpy }); http.intercept({ responseError(response, controller) { controller.halt(); }, }); + http.intercept({ response: unusedSpy, responseError: unusedSpy }); http.post('/my/path').then(unusedSpy, unusedSpy); await delay(1000); @@ -416,21 +417,21 @@ describe('interception', () => { request: unusedSpy, requestError: usedSpy, response: unusedSpy, - responseError: usedSpy, + responseError: unusedSpy, }); http.intercept({ request() { throw new Error('Interception Error'); }, response: unusedSpy, - responseError: usedSpy, + responseError: unusedSpy, }); - http.intercept({ request: usedSpy, response: unusedSpy, responseError: usedSpy }); + http.intercept({ request: usedSpy, response: unusedSpy, responseError: unusedSpy }); await expect(http.fetch('/my/path')).rejects.toThrow(/Interception Error/); expect(fetchMock.called()).toBe(false); expect(unusedSpy).toHaveBeenCalledTimes(0); - expect(usedSpy).toHaveBeenCalledTimes(5); + expect(usedSpy).toHaveBeenCalledTimes(2); }); it('should succeed if request throws but caught by interceptor', async () => { @@ -458,26 +459,76 @@ describe('interception', () => { expect(usedSpy).toHaveBeenCalledTimes(4); }); - describe('request availability during interception', () => { - it('should not be available to responseError when request throws', async () => { - expect.assertions(3); + it('should 
accumulate request information', async () => { + const routes = ['alpha', 'beta', 'gamma']; + const createRequest = jest.fn( + (request: Request) => new Request(`/api/${routes.shift()}`, request) + ); - let spiedRequest: Request | undefined; + http.intercept({ + request: createRequest, + }); + http.intercept({ + requestError(httpErrorRequest) { + return httpErrorRequest.request; + }, + }); + http.intercept({ + request(request) { + throw new Error('Invalid'); + }, + }); + http.intercept({ + request: createRequest, + }); + http.intercept({ + request: createRequest, + }); - http.intercept({ - request() { - throw new Error('Internal Server Error'); - }, - responseError({ request }) { - spiedRequest = request; - }, - }); + await expect(http.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); + expect(fetchMock.called()).toBe(true); + expect(routes.length).toBe(0); + expect(createRequest.mock.calls[0][0].url).toContain('/my/route'); + expect(createRequest.mock.calls[1][0].url).toContain('/api/alpha'); + expect(createRequest.mock.calls[2][0].url).toContain('/api/beta'); + expect(fetchMock.lastCall()!.request.url).toContain('/api/gamma'); + }); - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(fetchMock.called()).toBe(false); - expect(spiedRequest).toBeUndefined(); + it('should accumulate response information', async () => { + const bodies = ['alpha', 'beta', 'gamma']; + const createResponse = jest.fn((httpResponse: HttpResponse) => ({ + body: bodies.shift(), + })); + + http.intercept({ + response: createResponse, + }); + http.intercept({ + response: createResponse, }); + http.intercept({ + response(httpResponse) { + throw new Error('Invalid'); + }, + }); + http.intercept({ + responseError({ error, ...httpResponse }) { + return httpResponse; + }, + }); + http.intercept({ + response: createResponse, + }); + + await expect(http.fetch('/my/route')).resolves.toEqual('gamma'); + expect(fetchMock.called()).toBe(true); + expect(bodies.length).toBe(0); + expect(createResponse.mock.calls[0][0].body).toEqual({ foo: 'bar' }); + expect(createResponse.mock.calls[1][0].body).toBe('alpha'); + expect(createResponse.mock.calls[2][0].body).toBe('beta'); + }); + describe('request availability during interception', () => { it('should be available to responseError when response throws', async () => { let spiedRequest: Request | undefined; @@ -514,22 +565,6 @@ describe('interception', () => { await expect(http.fetch('/my/path')).rejects.toThrow(); expect(spiedResponse).toBeDefined(); }); - - it('should not be available to responseError when request throws', async () => { - let spiedResponse: Response | undefined; - - http.intercept({ - request() { - throw new Error('Internal Server Error'); - }, - responseError({ response }) { - spiedResponse = response; - }, - }); - - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(spiedResponse).toBeUndefined(); - }); }); it('should actually halt request interceptors in reverse order', async () => { diff --git a/src/core/public/http/http_setup.ts b/src/core/public/http/http_setup.ts index 5ca3b23c5a69c..a10358926de1f 100644 --- a/src/core/public/http/http_setup.ts +++ b/src/core/public/http/http_setup.ts @@ -110,15 +110,14 @@ export const setup = ( (promise, interceptor) => promise.then( async (current: Request) => { + next = current; checkHalt(controller); if (!interceptor.request) { return current; } - next = (await interceptor.request(current, controller)) || current; - - return next; + return (await interceptor.request(current, controller)) || 
current; }, async error => { checkHalt(controller, error); @@ -155,17 +154,21 @@ export const setup = ( (promise, interceptor) => promise.then( async httpResponse => { + current = httpResponse; checkHalt(controller); if (!interceptor.response) { return httpResponse; } - current = (await interceptor.response(httpResponse, controller)) || httpResponse; - - return current; + return { + ...httpResponse, + ...((await interceptor.response(httpResponse, controller)) || {}), + }; }, async error => { + const request = error.request || (current && current.request); + checkHalt(controller, error); if (!interceptor.responseError) { @@ -176,7 +179,7 @@ export const setup = ( const next = await interceptor.responseError( { error, - request: error.request || (current && current.request), + request, response: error.response || (current && current.response), body: error.body || (current && current.body), }, @@ -189,17 +192,14 @@ export const setup = ( throw error; } - return next; + return { ...next, request }; } catch (err) { checkHalt(controller, err); throw err; } } ), - responsePromise.then(httpResponse => { - current = httpResponse; - return httpResponse; - }) + responsePromise ); return finalHttpResponse.body; @@ -249,18 +249,23 @@ export const setup = ( // We wrap the interception in a separate promise to ensure that when // a halt is called we do not resolve or reject, halting handling of the promise. return new Promise(async (resolve, reject) => { - try { - const value = await interceptResponse( - interceptRequest(initialRequest, controller).then(fetcher), - controller - ); - - resolve(value); - } catch (err) { + function rejectIfNotHalted(err: any) { if (!(err instanceof HttpInterceptHaltError)) { reject(err); } } + + try { + const request = await interceptRequest(initialRequest, controller); + + try { + resolve(await interceptResponse(fetcher(request), controller)); + } catch (err) { + rejectIfNotHalted(err); + } + } catch (err) { + rejectIfNotHalted(err); + } }); } diff --git a/src/core/public/http/types.ts b/src/core/public/http/types.ts index 9bc1313fae546..96500d566b3e5 100644 --- a/src/core/public/http/types.ts +++ b/src/core/public/http/types.ts @@ -226,25 +226,34 @@ export type HttpHandler = (path: string, options?: HttpFetchOptions) => Promise< export type HttpBody = BodyInit | null | any; /** @public */ -export interface HttpResponse { - request?: Request; +export interface InterceptedHttpResponse { response?: Response; body?: HttpBody; } +/** @public */ +export interface HttpResponse extends InterceptedHttpResponse { + request: Readonly; +} + /** @public */ export interface IHttpFetchError extends Error { readonly request: Request; readonly response?: Response; + /** + * @deprecated Provided for legacy compatibility. Prefer the `request` property instead. + */ + readonly req: Request; + /** + * @deprecated Provided for legacy compatibility. Prefer the `response` property instead. 
+ */ + readonly res?: Response; readonly body?: any; } /** @public */ -export interface HttpErrorResponse { +export interface HttpErrorResponse extends HttpResponse { error: Error | IHttpFetchError; - request?: Request; - response?: Response; - body?: HttpBody; } /** @public */ export interface HttpErrorRequest { @@ -287,7 +296,7 @@ export interface HttpInterceptor { response?( httpResponse: HttpResponse, controller: IHttpInterceptController - ): Promise | HttpResponse | void; + ): Promise | InterceptedHttpResponse | void; /** * Define an interceptor to be executed if a response interceptor throws an error or returns a rejected Promise. @@ -297,7 +306,7 @@ export interface HttpInterceptor { responseError?( httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController - ): Promise | HttpResponse | void; + ): Promise | InterceptedHttpResponse | void; } /** diff --git a/src/core/public/index.ts b/src/core/public/index.ts index 3d451c7c1f37e..3d8714a001158 100644 --- a/src/core/public/index.ts +++ b/src/core/public/index.ts @@ -79,6 +79,8 @@ export { SavedObjectsBatchResponse, SavedObjectsBulkCreateObject, SavedObjectsBulkCreateOptions, + SavedObjectsBulkUpdateObject, + SavedObjectsBulkUpdateOptions, SavedObjectsCreateOptions, SavedObjectsFindResponsePublic, SavedObjectsUpdateOptions, @@ -110,6 +112,7 @@ export { IBasePath, IHttpInterceptController, IHttpFetchError, + InterceptedHttpResponse, } from './http'; export { diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 0db6e74c7e804..ec8a22fe5953c 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -426,15 +426,9 @@ export interface HttpErrorRequest { } // @public (undocumented) -export interface HttpErrorResponse { - // (undocumented) - body?: HttpBody; +export interface HttpErrorResponse extends HttpResponse { // (undocumented) error: Error | IHttpFetchError; - // (undocumented) - request?: Request; - // (undocumented) - response?: Response; } // @public @@ -463,8 +457,8 @@ export interface HttpHeadersInit { export interface HttpInterceptor { request?(request: Request, controller: IHttpInterceptController): Promise | Request | void; requestError?(httpErrorRequest: HttpErrorRequest, controller: IHttpInterceptController): Promise | Request | void; - response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; - responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; + response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; + responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; } // @public @@ -486,13 +480,9 @@ export interface HttpRequestInit { } // @public (undocumented) -export interface HttpResponse { +export interface HttpResponse extends InterceptedHttpResponse { // (undocumented) - body?: HttpBody; - // (undocumented) - request?: Request; - // (undocumented) - response?: Response; + request: Readonly; } // @public (undocumented) @@ -547,8 +537,12 @@ export type IContextProvider, TContextName export interface IHttpFetchError extends Error { // (undocumented) readonly body?: any; + // @deprecated (undocumented) + readonly req: Request; // (undocumented) readonly request: Request; + // @deprecated (undocumented) + readonly res?: Response; // (undocumented) readonly response?: Response; } @@ -559,6 +553,14 @@ export 
interface IHttpInterceptController { halted: boolean; } +// @public (undocumented) +export interface InterceptedHttpResponse { + // (undocumented) + body?: HttpBody; + // (undocumented) + response?: Response; +} + // @public export type IToasts = Pick; @@ -757,6 +759,26 @@ export interface SavedObjectsBulkCreateOptions { overwrite?: boolean; } +// @public (undocumented) +export interface SavedObjectsBulkUpdateObject { + // (undocumented) + attributes: T; + // (undocumented) + id: string; + // (undocumented) + references?: SavedObjectReference[]; + // (undocumented) + type: string; + // (undocumented) + version?: string; +} + +// @public (undocumented) +export interface SavedObjectsBulkUpdateOptions { + // (undocumented) + namespace?: string; +} + // @public export class SavedObjectsClient { // @internal @@ -766,6 +788,7 @@ export class SavedObjectsClient { id: string; type: string; }[]) => Promise>; + bulkUpdate(objects?: SavedObjectsBulkUpdateObject[]): Promise>; create: (type: string, attributes: T, options?: SavedObjectsCreateOptions) => Promise>; delete: (type: string, id: string) => Promise<{}>; find: (options: Pick) => Promise>; diff --git a/src/core/public/saved_objects/index.ts b/src/core/public/saved_objects/index.ts index 9452ece0ce823..74d33c506db48 100644 --- a/src/core/public/saved_objects/index.ts +++ b/src/core/public/saved_objects/index.ts @@ -21,11 +21,13 @@ export { SavedObjectsBatchResponse, SavedObjectsBulkCreateObject, SavedObjectsBulkCreateOptions, + SavedObjectsBulkUpdateObject, SavedObjectsClient, SavedObjectsClientContract, SavedObjectsCreateOptions, SavedObjectsFindResponsePublic, SavedObjectsUpdateOptions, + SavedObjectsBulkUpdateOptions, } from './saved_objects_client'; export { SimpleSavedObject } from './simple_saved_object'; export { SavedObjectsStart } from './saved_objects_service'; diff --git a/src/core/public/saved_objects/saved_objects_client.test.ts b/src/core/public/saved_objects/saved_objects_client.test.ts index 4c0fe90a5bfbd..e633e00965c6a 100644 --- a/src/core/public/saved_objects/saved_objects_client.test.ts +++ b/src/core/public/saved_objects/saved_objects_client.test.ts @@ -322,6 +322,43 @@ describe('SavedObjectsClient', () => { }); }); + describe('#bulk_update', () => { + const bulkUpdateDoc = { + id: 'AVwSwFxtcMV38qjDZoQg', + type: 'config', + attributes: { title: 'Example title' }, + version: 'foo', + }; + beforeEach(() => { + http.fetch.mockResolvedValue({ saved_objects: [bulkUpdateDoc] }); + }); + + test('resolves with array of SimpleSavedObject instances', async () => { + const response = savedObjectsClient.bulkUpdate([bulkUpdateDoc]); + await expect(response).resolves.toHaveProperty('savedObjects'); + + const result = await response; + expect(result.savedObjects).toHaveLength(1); + expect(result.savedObjects[0]).toBeInstanceOf(SimpleSavedObject); + }); + + test('makes HTTP call', async () => { + await savedObjectsClient.bulkUpdate([bulkUpdateDoc]); + expect(http.fetch.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + "/api/saved_objects/_bulk_update", + Object { + "body": "[{\\"id\\":\\"AVwSwFxtcMV38qjDZoQg\\",\\"type\\":\\"config\\",\\"attributes\\":{\\"title\\":\\"Example title\\"},\\"version\\":\\"foo\\"}]", + "method": "PUT", + "query": undefined, + }, + ], + ] + `); + }); + }); + describe('#find', () => { const object = { id: 'logstash-*', type: 'index-pattern', title: 'Test' }; @@ -419,15 +456,15 @@ describe('SavedObjectsClient', () => { }; http.fetch.mockRejectedValue(err); return expect(savedObjectsClient.get(doc.type, 
doc.id)).rejects.toMatchInlineSnapshot(` - Object { - "body": "response body", - "res": Object { - "ok": false, - "redirected": false, - "status": 409, - "statusText": "Conflict", - }, - } - `); + Object { + "body": "response body", + "res": Object { + "ok": false, + "redirected": false, + "status": 409, + "statusText": "Conflict", + }, + } + `); }); }); diff --git a/src/core/public/saved_objects/saved_objects_client.ts b/src/core/public/saved_objects/saved_objects_client.ts index cf0300157aece..729d356e76ebd 100644 --- a/src/core/public/saved_objects/saved_objects_client.ts +++ b/src/core/public/saved_objects/saved_objects_client.ts @@ -73,6 +73,22 @@ export interface SavedObjectsBulkCreateOptions { overwrite?: boolean; } +/** @public */ +export interface SavedObjectsBulkUpdateObject< + T extends SavedObjectAttributes = SavedObjectAttributes +> { + type: string; + id: string; + attributes: T; + version?: string; + references?: SavedObjectReference[]; +} + +/** @public */ +export interface SavedObjectsBulkUpdateOptions { + namespace?: string; +} + /** @public */ export interface SavedObjectsUpdateOptions { version?: string; @@ -411,6 +427,27 @@ export class SavedObjectsClient { }); } + /** + * Update multiple documents at once + * + * @param {array} objects - [{ type, id, attributes, options: { version, references } }] + * @returns The result of the update operation containing both failed and updated saved objects. + */ + public bulkUpdate(objects: SavedObjectsBulkUpdateObject[] = []) { + const path = this.getPath(['_bulk_update']); + + return this.savedObjectsFetch(path, { + method: 'PUT', + body: JSON.stringify(objects), + }).then(resp => { + resp.saved_objects = resp.saved_objects.map((d: SavedObject) => this.createSavedObject(d)); + return renameKeys< + PromiseType>, + SavedObjectsBatchResponse + >({ saved_objects: 'savedObjects' }, resp) as SavedObjectsBatchResponse; + }); + } + private createSavedObject( options: SavedObject ): SimpleSavedObject { diff --git a/src/core/public/saved_objects/saved_objects_service.mock.ts b/src/core/public/saved_objects/saved_objects_service.mock.ts index feace09806a97..247e684a24b92 100644 --- a/src/core/public/saved_objects/saved_objects_service.mock.ts +++ b/src/core/public/saved_objects/saved_objects_service.mock.ts @@ -24,6 +24,7 @@ const createStartContractMock = () => { client: { create: jest.fn(), bulkCreate: jest.fn(), + bulkUpdate: jest.fn(), delete: jest.fn(), bulkGet: jest.fn(), find: jest.fn(), diff --git a/src/core/server/http/http_server.test.ts b/src/core/server/http/http_server.test.ts index acae9d8ff0e70..f61371c5437e6 100644 --- a/src/core/server/http/http_server.test.ts +++ b/src/core/server/http/http_server.test.ts @@ -577,6 +577,45 @@ test('exposes route details of incoming request to a route handler', async () => }); }); +describe('conditional compression', () => { + test('disables compression when there is a referer', async () => { + const { registerRouter, server: innerServer } = await server.setup(config); + + const router = new Router('', logger, enhanceWithContext); + router.get({ path: '/', validate: false }, (context, req, res) => + // we need the large body here so that compression would normally be used + res.ok({ body: 'hello'.repeat(500), headers: { 'Content-Type': 'text/html; charset=UTF-8' } }) + ); + registerRouter(router); + + await server.start(); + const response = await supertest(innerServer.listener) + .get('/') + .set('accept-encoding', 'gzip') + .set('referer', 'http://some-other-site/'); + + 
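+      // With a referer present the server should have cleared the request's
+      // accept-encoding, so no content-encoding header is expected here,
+      // presumably to keep compressed response sizes from leaking information
+      // to other origins.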
expect(response.header).not.toHaveProperty('content-encoding'); + }); + + test(`enables compression when there isn't a referer`, async () => { + const { registerRouter, server: innerServer } = await server.setup(config); + + const router = new Router('', logger, enhanceWithContext); + router.get({ path: '/', validate: false }, (context, req, res) => + // we need the large body here so that compression will be used + res.ok({ body: 'hello'.repeat(500), headers: { 'Content-Type': 'text/html; charset=UTF-8' } }) + ); + registerRouter(router); + + await server.start(); + const response = await supertest(innerServer.listener) + .get('/') + .set('accept-encoding', 'gzip'); + + expect(response.header).toHaveProperty('content-encoding', 'gzip'); + }); +}); + describe('setup contract', () => { describe('#createSessionStorage', () => { it('creates session storage factory', async () => { diff --git a/src/core/server/http/http_server.ts b/src/core/server/http/http_server.ts index 3354324c12407..d6077200d3c75 100644 --- a/src/core/server/http/http_server.ts +++ b/src/core/server/http/http_server.ts @@ -96,6 +96,7 @@ export class HttpServer { const basePathService = new BasePath(config.basePath); this.setupBasePathRewrite(config, basePathService); + this.setupConditionalCompression(); return { registerRouter: this.registerRouter.bind(this), @@ -175,6 +176,23 @@ export class HttpServer { }); } + private setupConditionalCompression() { + if (this.server === undefined) { + throw new Error('Server is not created yet'); + } + + this.server.ext('onRequest', (request, h) => { + // whenever there is a referrer, don't use compression even if the client supports it + if (request.info.referrer !== '') { + this.log.debug( + `Not using compression because there is a referer: ${request.info.referrer}` + ); + request.info.acceptEncoding = ''; + } + return h.continue; + }); + } + private registerOnPostAuth(fn: OnPostAuthHandler) { if (this.server === undefined) { throw new Error('Server is not created yet'); diff --git a/src/core/server/index.ts b/src/core/server/index.ts index 90e5746b2766d..e0d230006d587 100644 --- a/src/core/server/index.ts +++ b/src/core/server/index.ts @@ -138,7 +138,9 @@ export { export { SavedObjectsBulkCreateObject, SavedObjectsBulkGetObject, + SavedObjectsBulkUpdateObject, SavedObjectsBulkResponse, + SavedObjectsBulkUpdateResponse, SavedObjectsClient, SavedObjectsClientProviderOptions, SavedObjectsClientWrapperFactory, diff --git a/src/core/server/plugins/plugins_service.test.ts b/src/core/server/plugins/plugins_service.test.ts index fdbb5efbfafec..d25f9087432bd 100644 --- a/src/core/server/plugins/plugins_service.test.ts +++ b/src/core/server/plugins/plugins_service.test.ts @@ -23,7 +23,7 @@ import { resolve, join } from 'path'; import { BehaviorSubject, from } from 'rxjs'; import { schema } from '@kbn/config-schema'; -import { Config, ConfigService, Env, ObjectToConfigAdapter } from '../config'; +import { Config, ConfigPath, ConfigService, Env, ObjectToConfigAdapter } from '../config'; import { getEnvOptions } from '../config/__mocks__/env'; import { elasticsearchServiceMock } from '../elasticsearch/elasticsearch_service.mock'; import { httpServiceMock } from '../http/http_service.mock'; @@ -55,6 +55,47 @@ const logger = loggingServiceMock.create(); }); }); +const createPlugin = ( + id: string, + { + path = id, + disabled = false, + version = 'some-version', + requiredPlugins = [], + optionalPlugins = [], + kibanaVersion = '7.0.0', + configPath = [path], + server = true, + ui = true, + }: { 
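+    // Optional per-test overrides; anything omitted falls back to the
+    // defaults destructured above.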
+ path?: string; + disabled?: boolean; + version?: string; + requiredPlugins?: string[]; + optionalPlugins?: string[]; + kibanaVersion?: string; + configPath?: ConfigPath; + server?: boolean; + ui?: boolean; + } +): PluginWrapper => { + return new PluginWrapper({ + path, + manifest: { + id, + version, + configPath: `${configPath}${disabled ? '-disabled' : ''}`, + kibanaVersion, + requiredPlugins, + optionalPlugins, + server, + ui, + }, + opaqueId: Symbol(id), + initializerContext: { logger } as any, + }); +}; + beforeEach(async () => { mockPackage.raw = { branch: 'feature-v1', @@ -128,35 +169,19 @@ test('`discover` throws if discovered plugins with conflicting names', async () mockDiscover.mockReturnValue({ error$: from([]), plugin$: from([ - new PluginWrapper({ + createPlugin('conflicting-id', { path: 'path-4', - manifest: { - id: 'conflicting-id', - version: 'some-version', - configPath: 'path', - kibanaVersion: '7.0.0', - requiredPlugins: ['some-required-plugin', 'some-required-plugin-2'], - optionalPlugins: ['some-optional-plugin'], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + version: 'some-version', + configPath: 'path', + requiredPlugins: ['some-required-plugin', 'some-required-plugin-2'], + optionalPlugins: ['some-optional-plugin'], }), - new PluginWrapper({ - path: 'path-5', - manifest: { - id: 'conflicting-id', - version: 'some-other-version', - configPath: ['plugin', 'path'], - kibanaVersion: '7.0.0', - requiredPlugins: ['some-required-plugin'], - optionalPlugins: [], - server: true, - ui: false, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + createPlugin('conflicting-id', { + path: 'path-4', + version: 'some-version', + configPath: 'path', + requiredPlugins: ['some-required-plugin', 'some-required-plugin-2'], + optionalPlugins: ['some-optional-plugin'], }), ]), }); @@ -180,65 +205,25 @@ test('`discover` properly detects plugins that should be disabled.', async () => mockDiscover.mockReturnValue({ error$: from([]), plugin$: from([ - new PluginWrapper({ + createPlugin('explicitly-disabled-plugin', { + disabled: true, path: 'path-1', - manifest: { - id: 'explicitly-disabled-plugin', - version: 'some-version', - configPath: 'path-1-disabled', - kibanaVersion: '7.0.0', - requiredPlugins: [], - optionalPlugins: [], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + configPath: 'path-1', }), - new PluginWrapper({ + createPlugin('plugin-with-missing-required-deps', { path: 'path-2', - manifest: { - id: 'plugin-with-missing-required-deps', - version: 'some-version', - configPath: 'path-2', - kibanaVersion: '7.0.0', - requiredPlugins: ['missing-plugin'], - optionalPlugins: [], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + configPath: 'path-2', + requiredPlugins: ['missing-plugin'], }), - new PluginWrapper({ + createPlugin('plugin-with-disabled-transitive-dep', { path: 'path-3', - manifest: { - id: 'plugin-with-disabled-transitive-dep', - version: 'some-version', - configPath: 'path-3', - kibanaVersion: '7.0.0', - requiredPlugins: ['another-explicitly-disabled-plugin'], - optionalPlugins: [], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + configPath: 'path-3', + requiredPlugins: ['another-explicitly-disabled-plugin'], }), - new PluginWrapper({ + createPlugin('another-explicitly-disabled-plugin', { + disabled: true, path: 'path-4', - manifest: { - id: 
'another-explicitly-disabled-plugin', - version: 'some-version', - configPath: 'path-4-disabled', - kibanaVersion: '7.0.0', - requiredPlugins: [], - optionalPlugins: [], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + configPath: 'path-4-disabled', }), ]), }); @@ -271,37 +256,77 @@ Array [ `); }); -test('`discover` properly invokes plugin discovery and ignores non-critical errors.', async () => { - const firstPlugin = new PluginWrapper({ +test('`discover` does not throw in case of mutual plugin dependencies', async () => { + const firstPlugin = createPlugin('first-plugin', { path: 'path-1', - manifest: { - id: 'some-id', - version: 'some-version', - configPath: 'path', - kibanaVersion: '7.0.0', - requiredPlugins: ['some-other-id'], - optionalPlugins: ['missing-optional-dep'], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + requiredPlugins: ['second-plugin'], + }); + const secondPlugin = createPlugin('second-plugin', { + path: 'path-2', + requiredPlugins: ['first-plugin'], }); - const secondPlugin = new PluginWrapper({ + mockDiscover.mockReturnValue({ + error$: from([]), + plugin$: from([firstPlugin, secondPlugin]), + }); + + await expect(pluginsService.discover()).resolves.toBeUndefined(); + + expect(mockDiscover).toHaveBeenCalledTimes(1); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledTimes(2); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(firstPlugin); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(secondPlugin); +}); + +test('`discover` does not throw in case of cyclic plugin dependencies', async () => { + const firstPlugin = createPlugin('first-plugin', { + path: 'path-1', + requiredPlugins: ['second-plugin'], + }); + const secondPlugin = createPlugin('second-plugin', { path: 'path-2', - manifest: { - id: 'some-other-id', - version: 'some-other-version', - configPath: ['plugin', 'path'], - kibanaVersion: '7.0.0', - requiredPlugins: [], - optionalPlugins: [], - server: true, - ui: false, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + requiredPlugins: ['third-plugin', 'last-plugin'], + }); + const thirdPlugin = createPlugin('third-plugin', { + path: 'path-3', + requiredPlugins: ['last-plugin', 'first-plugin'], + }); + const lastPlugin = createPlugin('last-plugin', { + path: 'path-4', + requiredPlugins: ['first-plugin'], + }); + const missingDepsPlugin = createPlugin('missing-deps-plugin', { + path: 'path-5', + requiredPlugins: ['not-a-plugin'], + }); + + mockDiscover.mockReturnValue({ + error$: from([]), + plugin$: from([firstPlugin, secondPlugin, thirdPlugin, lastPlugin, missingDepsPlugin]), + }); + + await expect(pluginsService.discover()).resolves.toBeUndefined(); + + expect(mockDiscover).toHaveBeenCalledTimes(1); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledTimes(4); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(firstPlugin); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(secondPlugin); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(thirdPlugin); + expect(mockPluginSystem.addPlugin).toHaveBeenCalledWith(lastPlugin); +}); + +test('`discover` properly invokes plugin discovery and ignores non-critical errors.', async () => { + const firstPlugin = createPlugin('some-id', { + path: 'path-1', + configPath: 'path', + requiredPlugins: ['some-other-id'], + optionalPlugins: ['missing-optional-dep'], + }); + const secondPlugin = createPlugin('some-other-id', { + path: 'path-2', + version: 
'some-other-version', + configPath: ['plugin', 'path'], }); mockDiscover.mockReturnValue({ @@ -360,20 +385,9 @@ test('`discover` registers plugin config schema in config service', async () => mockDiscover.mockReturnValue({ error$: from([]), plugin$: from([ - new PluginWrapper({ + createPlugin('some-id', { path: 'path-with-schema', - manifest: { - id: 'some-id', - version: 'some-version', - configPath: 'path', - kibanaVersion: '7.0.0', - requiredPlugins: [], - optionalPlugins: [], - server: true, - ui: true, - }, - opaqueId: Symbol(), - initializerContext: { logger } as any, + configPath: 'path', }), ]), }); diff --git a/src/core/server/plugins/plugins_service.ts b/src/core/server/plugins/plugins_service.ts index 781b3ad309ba1..92b537deae337 100644 --- a/src/core/server/plugins/plugins_service.ts +++ b/src/core/server/plugins/plugins_service.ts @@ -175,15 +175,18 @@ export class PluginsService implements CoreService + pluginEnableStatuses: Map, + parents: PluginName[] = [] ): boolean { const pluginInfo = pluginEnableStatuses.get(pluginName); return ( pluginInfo !== undefined && pluginInfo.isEnabled && - pluginInfo.plugin.requiredPlugins.every(dependencyName => - this.shouldEnablePlugin(dependencyName, pluginEnableStatuses) - ) + pluginInfo.plugin.requiredPlugins + .filter(dep => !parents.includes(dep)) + .every(dependencyName => + this.shouldEnablePlugin(dependencyName, pluginEnableStatuses, [...parents, pluginName]) + ) ); } } diff --git a/src/core/server/plugins/plugins_system.test.ts b/src/core/server/plugins/plugins_system.test.ts index 7599ff0378caf..f67bd371ff565 100644 --- a/src/core/server/plugins/plugins_system.test.ts +++ b/src/core/server/plugins/plugins_system.test.ts @@ -117,7 +117,7 @@ test('`setupPlugins` throws plugin has missing required dependency', async () => pluginsSystem.addPlugin(createPlugin('some-id', { required: ['missing-dep'] })); await expect(pluginsSystem.setupPlugins(setupDeps)).rejects.toMatchInlineSnapshot( - `[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["some-id",{}]]]` + `[Error: Topological ordering of plugins did not complete, these plugins have cyclic or missing dependencies: ["some-id"]]` ); }); @@ -127,7 +127,7 @@ test('`setupPlugins` throws if plugins have circular required dependency', async pluginsSystem.addPlugin(createPlugin('depends-on-2', { required: ['depends-on-1'] })); await expect(pluginsSystem.setupPlugins(setupDeps)).rejects.toMatchInlineSnapshot( - `[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["depends-on-1",{}],["depends-on-2",{}]]]` + `[Error: Topological ordering of plugins did not complete, these plugins have cyclic or missing dependencies: ["depends-on-1","depends-on-2"]]` ); }); @@ -137,7 +137,7 @@ test('`setupPlugins` throws if plugins have circular optional dependency', async pluginsSystem.addPlugin(createPlugin('depends-on-2', { optional: ['depends-on-1'] })); await expect(pluginsSystem.setupPlugins(setupDeps)).rejects.toMatchInlineSnapshot( - `[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["depends-on-1",{}],["depends-on-2",{}]]]` + `[Error: Topological ordering of plugins did not complete, these plugins have cyclic or missing dependencies: ["depends-on-1","depends-on-2"]]` ); }); diff --git a/src/core/server/plugins/plugins_system.ts b/src/core/server/plugins/plugins_system.ts index 266a68b32703e..9f7d8e4f35172 100644 --- a/src/core/server/plugins/plugins_system.ts +++ 
b/src/core/server/plugins/plugins_system.ts @@ -245,9 +245,9 @@ export class PluginsSystem { } if (pluginsDependenciesGraph.size > 0) { - const edgesLeft = JSON.stringify([...pluginsDependenciesGraph.entries()]); + const edgesLeft = JSON.stringify([...pluginsDependenciesGraph.keys()]); throw new Error( - `Topological ordering of plugins did not complete, these edges could not be ordered: ${edgesLeft}` + `Topological ordering of plugins did not complete, these plugins have cyclic or missing dependencies: ${edgesLeft}` ); } diff --git a/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts b/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts index 89d555e06a634..57feebbf67ccd 100644 --- a/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts +++ b/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts @@ -18,6 +18,7 @@ */ import { SavedObject } from '../types'; +import { SavedObjectsClientMock } from '../../mocks'; import { getObjectReferencesToFetch, fetchNestedDependencies } from './inject_nested_depdendencies'; describe('getObjectReferencesToFetch()', () => { @@ -107,17 +108,8 @@ describe('getObjectReferencesToFetch()', () => { }); }); -describe('fetchNestedDependencies', () => { - const savedObjectsClient = { - errors: {} as any, - find: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - bulkCreate: jest.fn(), - delete: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; +describe('injectNestedDependencies', () => { + const savedObjectsClient = SavedObjectsClientMock.create(); afterEach(() => { jest.resetAllMocks(); @@ -487,6 +479,8 @@ describe('fetchNestedDependencies', () => { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, { id: '2', diff --git a/src/core/server/saved_objects/import/import_saved_objects.test.ts b/src/core/server/saved_objects/import/import_saved_objects.test.ts index 194756462fc78..df95fb75f0f4f 100644 --- a/src/core/server/saved_objects/import/import_saved_objects.test.ts +++ b/src/core/server/saved_objects/import/import_saved_objects.test.ts @@ -20,7 +20,14 @@ import { Readable } from 'stream'; import { SavedObject } from '../types'; import { importSavedObjects } from './import_saved_objects'; +import { SavedObjectsClientMock } from '../../mocks'; +const emptyResponse = { + saved_objects: [], + total: 0, + per_page: 0, + page: 0, +}; describe('importSavedObjects()', () => { const savedObjects: SavedObject[] = [ { @@ -56,16 +63,7 @@ describe('importSavedObjects()', () => { references: [], }, ]; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { jest.resetAllMocks(); @@ -101,7 +99,7 @@ describe('importSavedObjects()', () => { this.push(null); }, }); - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValue({ saved_objects: savedObjects, }); @@ -184,7 +182,7 @@ describe('importSavedObjects()', () => { this.push(null); }, }); - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValue({ saved_objects: savedObjects, }); @@ -268,7 +266,7 @@ describe('importSavedObjects()', () => { 
this.push(null); }, }); - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValue({ saved_objects: savedObjects, }); @@ -351,7 +349,7 @@ describe('importSavedObjects()', () => { this.push(null); }, }); - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValue({ saved_objects: savedObjects.map(savedObject => ({ type: savedObject.type, @@ -360,6 +358,8 @@ describe('importSavedObjects()', () => { statusCode: 409, message: 'conflict', }, + attributes: {}, + references: [], })), }); const result = await importSavedObjects({ @@ -455,6 +455,8 @@ describe('importSavedObjects()', () => { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, ], }); @@ -530,7 +532,7 @@ describe('importSavedObjects()', () => { this.push(null); }, }); - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValue({ saved_objects: savedObjects, }); diff --git a/src/core/server/saved_objects/import/resolve_import_errors.test.ts b/src/core/server/saved_objects/import/resolve_import_errors.test.ts index 9d0e133c5951c..6aab8ef5adf9e 100644 --- a/src/core/server/saved_objects/import/resolve_import_errors.test.ts +++ b/src/core/server/saved_objects/import/resolve_import_errors.test.ts @@ -20,6 +20,7 @@ import { Readable } from 'stream'; import { SavedObject } from '../types'; import { resolveImportErrors } from './resolve_import_errors'; +import { SavedObjectsClientMock } from '../../mocks'; describe('resolveImportErrors()', () => { const savedObjects: SavedObject[] = [ @@ -62,16 +63,7 @@ describe('resolveImportErrors()', () => { ], }, ]; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { jest.resetAllMocks(); @@ -316,6 +308,8 @@ describe('resolveImportErrors()', () => { statusCode: 409, message: 'conflict', }, + attributes: {}, + references: [], })), }); const result = await resolveImportErrors({ @@ -416,6 +410,8 @@ describe('resolveImportErrors()', () => { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, ], }); diff --git a/src/core/server/saved_objects/import/validate_references.test.ts b/src/core/server/saved_objects/import/validate_references.test.ts index 1a558b3d82b32..269cd3055b047 100644 --- a/src/core/server/saved_objects/import/validate_references.test.ts +++ b/src/core/server/saved_objects/import/validate_references.test.ts @@ -18,18 +18,10 @@ */ import { getNonExistingReferenceAsKeys, validateReferences } from './validate_references'; +import { SavedObjectsClientMock } from '../../mocks'; describe('getNonExistingReferenceAsKeys()', () => { - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { jest.resetAllMocks(); @@ -176,6 +168,8 @@ describe('getNonExistingReferenceAsKeys()', () => { statusCode: 404, message: 'Not found', }, + 
attributes: {}, + references: [], }, { id: '3', @@ -184,6 +178,8 @@ describe('getNonExistingReferenceAsKeys()', () => { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, ], }); @@ -226,16 +222,7 @@ describe('getNonExistingReferenceAsKeys()', () => { }); describe('validateReferences()', () => { - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { jest.resetAllMocks(); @@ -262,6 +249,8 @@ Object { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, { type: 'index-pattern', @@ -270,6 +259,8 @@ Object { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, { type: 'index-pattern', @@ -278,6 +269,8 @@ Object { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, { type: 'search', @@ -286,6 +279,8 @@ Object { statusCode: 404, message: 'Not found', }, + attributes: {}, + references: [], }, { id: '8', @@ -611,6 +606,8 @@ Object { statusCode: 400, message: 'Error', }, + attributes: {}, + references: [], }, ], }); diff --git a/src/core/server/saved_objects/service/lib/repository.test.js b/src/core/server/saved_objects/service/lib/repository.test.js index 7e89b8f3c0820..9e4edcd8d2943 100644 --- a/src/core/server/saved_objects/service/lib/repository.test.js +++ b/src/core/server/saved_objects/service/lib/repository.test.js @@ -18,6 +18,7 @@ */ import { delay } from 'bluebird'; +import _ from 'lodash'; import { SavedObjectsRepository } from './repository'; import * as getSearchDslNS from './search_dsl/search_dsl'; @@ -1963,6 +1964,459 @@ describe('SavedObjectsRepository', () => { }); }); + describe('#bulkUpdate', () => { + const { generateSavedObject, reset } = (() => { + let count = 0; + return { + generateSavedObject(overrides) { + count++; + return _.merge({ + type: 'index-pattern', + id: `logstash-${count}`, + attributes: { title: `Testing ${count}` }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }, overrides); + }, + reset() { + count = 0; + } + }; + })(); + + beforeEach(() => { + reset(); + }); + + const mockValidResponse = objects => + callAdminCluster.mockReturnValue({ + items: objects.map(items => ({ + update: { + _id: `${items.type}:${items.id}`, + _type: '_doc', + ...mockVersionProps, + result: 'updated', + } + })), + }); + + + it('waits until migrations are complete before proceeding', async () => { + const objects = [ + generateSavedObject(), + generateSavedObject() + ]; + + migrator.runMigrations = jest.fn(async () => + expect(callAdminCluster).not.toHaveBeenCalled() + ); + + mockValidResponse(objects); + + await expect( + savedObjectsRepository.bulkUpdate([ + generateSavedObject(), + ]) + ).resolves.toBeDefined(); + + expect(migrator.runMigrations).toHaveReturnedTimes(1); + }); + + it('returns current ES document, _seq_no and _primary_term encoded as version', async () => { + const objects = [ + generateSavedObject(), + generateSavedObject() + ]; + + mockValidResponse(objects); + + const response = await savedObjectsRepository.bulkUpdate(objects); + + expect(response.saved_objects[0]).toMatchObject({ + ..._.pick(objects[0], 'id', 'type', 'attributes'), + version: mockVersion, + references: objects[0].references + }); + expect(response.saved_objects[1]).toMatchObject({ + ..._.pick(objects[1], 'id', 'type', 
'attributes'), + version: mockVersion, + references: objects[1].references + }); + }); + + it('handles a mix of succesfull updates and errors', async () => { + const objects = [ + generateSavedObject(), + { + type: 'invalid-type', + id: 'invalid', + attributes: { title: 'invalid' } + }, + generateSavedObject(), + generateSavedObject({ + id: 'version_clash' + }), + ]; + + callAdminCluster.mockReturnValue({ + items: objects + // remove invalid from mocks + .filter(item => item.id !== 'invalid') + .map(items => { + switch(items.id) { + case 'version_clash': + return ({ + update: { + _id: `${items.type}:${items.id}`, + _type: '_doc', + error: { + type: 'version_conflict_engine_exception' + } + } + }); + default: + return ({ + update: { + _id: `${items.type}:${items.id}`, + _type: '_doc', + ...mockVersionProps, + result: 'updated', + } + }); + } + }), + }); + + const { saved_objects: [ + firstUpdatedObject, + invalidType, + secondUpdatedObject, + versionClashObject + ] } = await savedObjectsRepository.bulkUpdate(objects); + + expect(firstUpdatedObject).toMatchObject({ + ..._.pick(objects[0], 'id', 'type', 'attributes', 'references'), + version: mockVersion + }); + + expect(invalidType).toMatchObject({ + ..._.pick(objects[1], 'id', 'type'), + error: SavedObjectsErrorHelpers.createGenericNotFoundError('invalid-type', 'invalid').output.payload, + }); + + expect(secondUpdatedObject).toMatchObject({ + ..._.pick(objects[2], 'id', 'type', 'attributes', 'references'), + version: mockVersion + }); + + expect(versionClashObject).toMatchObject({ + ..._.pick(objects[3], 'id', 'type'), + error: { statusCode: 409, message: 'version conflict, document already exists' }, + }); + }); + + it('doesnt call Elasticsearch if there are no valid objects to update', async () => { + const objects = [ + { + type: 'invalid-type', + id: 'invalid', + attributes: { title: 'invalid' } + }, + { + type: 'invalid-type', + id: 'invalid 2', + attributes: { title: 'invalid' } + }, + ]; + + const { saved_objects: [ + invalidType, + invalidType2 + ] } = await savedObjectsRepository.bulkUpdate(objects); + + expect(callAdminCluster).not.toHaveBeenCalled(); + + expect(invalidType).toMatchObject({ + ..._.pick(objects[0], 'id', 'type'), + error: SavedObjectsErrorHelpers.createGenericNotFoundError('invalid-type', 'invalid').output.payload, + }); + + expect(invalidType2).toMatchObject({ + ..._.pick(objects[1], 'id', 'type'), + error: SavedObjectsErrorHelpers.createGenericNotFoundError('invalid-type', 'invalid 2').output.payload, + }); + }); + + it('accepts version', async () => { + const objects = [ + generateSavedObject({ + version: encodeHitVersion({ + _seq_no: 100, + _primary_term: 200, + }), + }), + generateSavedObject({ + version: encodeHitVersion({ + _seq_no: 300, + _primary_term: 400, + }), + }) + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + expect(callAdminCluster).toHaveBeenCalledTimes(1); + + const [, { body: [{ update: firstUpdate },, { update: secondUpdate }] }] = callAdminCluster.mock.calls[0]; + + expect(firstUpdate).toMatchObject({ + if_seq_no: 100, + if_primary_term: 200, + }); + + expect(secondUpdate).toMatchObject({ + if_seq_no: 300, + if_primary_term: 400, + }); + }); + + it('does not pass references if omitted', async () => { + const objects = [ + { + type: 'index-pattern', + id: `logstash-no-ref`, + attributes: { title: `Testing no-ref` } + } + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + 
expect(callAdminCluster).toHaveBeenCalledTimes(1); + + const [, { body: [, { doc: firstDoc }] }] = callAdminCluster.mock.calls[0]; + + expect(firstDoc).not.toMatchObject({ + references: [], + }); + }); + + it('passes references if they are provided', async () => { + const objects = [ + generateSavedObject({ + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }) + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + expect(callAdminCluster).toHaveBeenCalledTimes(1); + + const [, { body: [, { doc }] } ] = callAdminCluster.mock.calls[0]; + + expect(doc).toMatchObject({ + references: [{ + name: 'ref_0', + type: 'test', + id: '1', + }], + }); + }); + + it('passes empty references array if empty references array is provided', async () => { + const objects = [ + { + type: 'index-pattern', + id: `logstash-no-ref`, + attributes: { title: `Testing no-ref` }, + references: [] + } + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + expect(callAdminCluster).toHaveBeenCalledTimes(1); + + const [, { body: [, { doc }] } ] = callAdminCluster.mock.calls[0]; + + expect(doc).toMatchObject({ + references: [], + }); + }); + + it(`prepends namespace to the id but doesn't add namespace to body when providing namespace for namespaced type`, async () => { + + const objects = [ + generateSavedObject(), + generateSavedObject() + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects, { + namespace: 'foo-namespace' + }); + + const [, + { body: [ + { update: firstUpdate }, + { doc: firstUpdateDoc }, + { update: secondUpdate }, + { doc: secondUpdateDoc } + ] + } + ] = callAdminCluster.mock.calls[0]; + + expect(firstUpdate).toMatchObject({ + _id: 'foo-namespace:index-pattern:logstash-1', + _index: '.kibana-test', + }); + + expect(firstUpdateDoc).toMatchObject({ + updated_at: mockTimestamp, + 'index-pattern': { title: 'Testing 1' }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }); + + expect(secondUpdate).toMatchObject({ + _id: 'foo-namespace:index-pattern:logstash-2', + _index: '.kibana-test', + }); + + expect(secondUpdateDoc).toMatchObject({ + updated_at: mockTimestamp, + 'index-pattern': { title: 'Testing 2' }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }); + + expect(onBeforeWrite).toHaveBeenCalledTimes(1); + }); + + it(`doesn't prepend namespace to the id or add namespace property when providing no namespace for namespaced type`, async () => { + + const objects = [ + generateSavedObject(), + generateSavedObject() + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + const [, + { body: [ + { update: firstUpdate }, + { doc: firstUpdateDoc }, + { update: secondUpdate }, + { doc: secondUpdateDoc } + ] + } + ] = callAdminCluster.mock.calls[0]; + + expect(firstUpdate).toMatchObject({ + _id: 'index-pattern:logstash-1', + _index: '.kibana-test', + }); + + expect(firstUpdateDoc).toMatchObject({ + updated_at: mockTimestamp, + 'index-pattern': { title: 'Testing 1' }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }); + + expect(secondUpdate).toMatchObject({ + _id: 'index-pattern:logstash-2', + _index: '.kibana-test', + }); + + expect(secondUpdateDoc).toMatchObject({ + updated_at: mockTimestamp, + 'index-pattern': { title: 'Testing 2' }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }); + + 
expect(onBeforeWrite).toHaveBeenCalledTimes(1); + }); + + it(`doesn't prepend namespace to the id or add namespace property when providing namespace for namespace agnostic type`, async () => { + + const objects = [ + generateSavedObject({ + type: 'globaltype', + id: 'foo', + namespace: 'foo-namespace' + }) + ]; + + mockValidResponse(objects); + + await savedObjectsRepository.bulkUpdate(objects); + + const [, + { body: [{ update }, { doc }] } + ] = callAdminCluster.mock.calls[0]; + + expect(update).toMatchObject({ + _id: 'globaltype:foo', + _index: '.kibana-test', + }); + + expect(doc).toMatchObject({ + updated_at: mockTimestamp, + globaltype: { title: 'Testing 1' }, + references: [ + { + name: 'ref_0', + type: 'test', + id: '1', + }, + ], + }); + }); + }); + describe('#incrementCounter', () => { beforeEach(() => { callAdminCluster.mockImplementation((method, params) => ({ diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts index b4723f35b1efc..ea6c7f87a55e9 100644 --- a/src/core/server/saved_objects/service/lib/repository.ts +++ b/src/core/server/saved_objects/service/lib/repository.ts @@ -34,10 +34,12 @@ import { SavedObjectsBulkCreateObject, SavedObjectsBulkGetObject, SavedObjectsBulkResponse, + SavedObjectsBulkUpdateResponse, SavedObjectsCreateOptions, SavedObjectsFindResponse, SavedObjectsUpdateOptions, SavedObjectsUpdateResponse, + SavedObjectsBulkUpdateObject, } from '../saved_objects_client'; import { SavedObject, @@ -279,22 +281,12 @@ export class SavedObjectsRepository { const id = requestedId || responseId; if (error) { - if (error.type === 'version_conflict_engine_exception') { - return { - id, - type, - error: { statusCode: 409, message: 'version conflict, document already exists' }, - }; - } return { id, type, - error: { - message: error.reason || JSON.stringify(error), - }, + error: getBulkOperationError(error, type, id), }; } - return { id, type, @@ -673,6 +665,109 @@ export class SavedObjectsRepository { }; } + /** + * Updates multiple objects in bulk + * + * @param {array} objects - [{ type, id, attributes, options: { version, namespace } references }] + * @property {string} options.version - ensures version matches that of persisted object + * @property {string} [options.namespace] + * @returns {promise} - {saved_objects: [[{ id, type, version, references, attributes, error: { message } }]} + */ + async bulkUpdate( + objects: Array>, + options: SavedObjectsBaseOptions = {} + ): Promise> { + const time = this._getCurrentTime(); + const bulkUpdateParams: object[] = []; + + let requestIndexCounter = 0; + const expectedResults: Array> = objects.map(object => { + const { type, id } = object; + + if (!this._allowedTypes.includes(type)) { + return { + tag: 'Left' as 'Left', + error: { + id, + type, + error: SavedObjectsErrorHelpers.createGenericNotFoundError(type, id).output.payload, + }, + }; + } + + const { attributes, references, version } = object; + const { namespace } = options; + + const documentToSave = { + [type]: attributes, + updated_at: time, + references, + }; + + if (!Array.isArray(documentToSave.references)) { + delete documentToSave.references; + } + + const expectedResult = { + type, + id, + esRequestIndex: requestIndexCounter++, + documentToSave, + }; + + bulkUpdateParams.push( + { + update: { + _id: this._serializer.generateRawId(namespace, type, id), + _index: this.getIndexForType(type), + ...(version && decodeRequestVersion(version)), + }, + }, + { doc: documentToSave } + ); + + 
return { tag: 'Right' as 'Right', value: expectedResult }; + }); + + const esResponse = bulkUpdateParams.length + ? await this._writeToCluster('bulk', { + refresh: 'wait_for', + body: bulkUpdateParams, + }) + : {}; + + return { + saved_objects: expectedResults.map(expectedResult => { + if (isLeft(expectedResult)) { + return expectedResult.error; + } + + const { type, id, documentToSave, esRequestIndex } = expectedResult.value; + const response = esResponse.items[esRequestIndex]; + const { error, _seq_no: seqNo, _primary_term: primaryTerm } = Object.values( + response + )[0] as any; + + const { [type]: attributes, references, updated_at } = documentToSave; + if (error) { + return { + id, + type, + error: getBulkOperationError(error, type, id), + }; + } + return { + id, + type, + updated_at, + version: encodeVersion(seqNo, primaryTerm), + attributes, + references, + }; + }), + }; + } + /** * Increases a counter field by one. Creates the document if one doesn't exist for the given id. * @@ -802,3 +897,16 @@ export class SavedObjectsRepository { return omit(savedObject, 'namespace'); } } + +function getBulkOperationError(error: { type: string; reason?: string }, type: string, id: string) { + switch (error.type) { + case 'version_conflict_engine_exception': + return { statusCode: 409, message: 'version conflict, document already exists' }; + case 'document_missing_exception': + return SavedObjectsErrorHelpers.createGenericNotFoundError(type, id).output.payload; + default: + return { + message: error.reason || JSON.stringify(error), + }; + } +} diff --git a/src/core/server/saved_objects/service/saved_objects_client.mock.ts b/src/core/server/saved_objects/service/saved_objects_client.mock.ts index 187653db9a308..63c9a0ee35ae0 100644 --- a/src/core/server/saved_objects/service/saved_objects_client.mock.ts +++ b/src/core/server/saved_objects/service/saved_objects_client.mock.ts @@ -25,6 +25,7 @@ const create = () => errors: SavedObjectsErrorHelpers, create: jest.fn(), bulkCreate: jest.fn(), + bulkUpdate: jest.fn(), delete: jest.fn(), bulkGet: jest.fn(), find: jest.fn(), diff --git a/src/core/server/saved_objects/service/saved_objects_client.test.js b/src/core/server/saved_objects/service/saved_objects_client.test.js index 1ce738b38f3e7..cf4fa962a70b8 100644 --- a/src/core/server/saved_objects/service/saved_objects_client.test.js +++ b/src/core/server/saved_objects/service/saved_objects_client.test.js @@ -127,3 +127,21 @@ test(`#update`, async () => { expect(mockRepository.update).toHaveBeenCalledWith(type, id, attributes, options); expect(result).toBe(returnValue); }); + +test(`#bulkUpdate`, async () => { + const returnValue = Symbol(); + const mockRepository = { + bulkUpdate: jest.fn().mockResolvedValue(returnValue), + }; + const client = new SavedObjectsClient(mockRepository); + + const type = Symbol(); + const id = Symbol(); + const attributes = Symbol(); + const version = Symbol(); + const namespace = Symbol(); + const result = await client.bulkUpdate([{ type, id, attributes, version }], { namespace }); + + expect(mockRepository.bulkUpdate).toHaveBeenCalledWith([{ type, id, attributes, version }], { namespace }); + expect(result).toBe(returnValue); +}); diff --git a/src/core/server/saved_objects/service/saved_objects_client.ts b/src/core/server/saved_objects/service/saved_objects_client.ts index b83d72f9ad46d..4e04a08bd5212 100644 --- a/src/core/server/saved_objects/service/saved_objects_client.ts +++ b/src/core/server/saved_objects/service/saved_objects_client.ts @@ -55,6 +55,20 @@ export 
interface SavedObjectsBulkCreateObject + extends Pick { + /** The ID of this Saved Object, guaranteed to be unique for all objects of the same `type` */ + id: string; + /** The type of this Saved Object. Each plugin can define it's own custom Saved Object types. */ + type: string; + /** {@inheritdoc SavedObjectAttributes} */ + attributes: Partial; +} + /** * * @public @@ -83,8 +97,9 @@ export interface SavedObjectsFindResponse * @public */ export interface SavedObjectsUpdateOptions extends SavedObjectsBaseOptions { - /** Ensures version matches that of persisted object */ + /** An opaque version number which changes on each successful write operation. Can be used for implementing optimistic concurrency control. */ version?: string; + /** {@inheritdoc SavedObjectReference} */ references?: SavedObjectReference[]; } @@ -107,6 +122,14 @@ export interface SavedObjectsBulkResponse saved_objects: Array>; } +/** + * + * @public + */ +export interface SavedObjectsBulkUpdateResponse { + saved_objects: Array>; +} + /** * * @public @@ -229,4 +252,16 @@ export class SavedObjectsClient { ): Promise> { return await this._repository.update(type, id, attributes, options); } + + /** + * Bulk Updates multiple SavedObject at once + * + * @param objects + */ + async bulkUpdate( + objects: Array>, + options?: SavedObjectsBaseOptions + ): Promise> { + return await this._repository.bulkUpdate(objects, options); + } } diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index a329fda5d8593..9740f1f7032d1 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -1189,12 +1189,26 @@ export interface SavedObjectsBulkResponse saved_objects: Array>; } +// @public (undocumented) +export interface SavedObjectsBulkUpdateObject extends Pick { + attributes: Partial; + id: string; + type: string; +} + +// @public (undocumented) +export interface SavedObjectsBulkUpdateResponse { + // (undocumented) + saved_objects: Array>; +} + // @public (undocumented) export class SavedObjectsClient { // Warning: (ae-forgotten-export) The symbol "SavedObjectsRepository" needs to be exported by the entry point index.d.ts constructor(repository: SavedObjectsRepository); bulkCreate(objects: Array>, options?: SavedObjectsCreateOptions): Promise>; bulkGet(objects?: SavedObjectsBulkGetObject[], options?: SavedObjectsBaseOptions): Promise>; + bulkUpdate(objects: Array>, options?: SavedObjectsBaseOptions): Promise>; create(type: string, attributes: T, options?: SavedObjectsCreateOptions): Promise>; delete(type: string, id: string, options?: SavedObjectsBaseOptions): Promise<{}>; // (undocumented) @@ -1539,7 +1553,6 @@ export class SavedObjectsSerializer { // @public (undocumented) export interface SavedObjectsUpdateOptions extends SavedObjectsBaseOptions { - // (undocumented) references?: SavedObjectReference[]; version?: string; } diff --git a/src/core/server/ui_settings/create_objects_client_stub.ts b/src/core/server/ui_settings/create_objects_client_stub.ts index d52ec58fa7e37..1e4a5e6fb58ec 100644 --- a/src/core/server/ui_settings/create_objects_client_stub.ts +++ b/src/core/server/ui_settings/create_objects_client_stub.ts @@ -28,6 +28,7 @@ export interface SavedObjectsClientStub { create: sinon.SinonStub; bulkCreate: sinon.SinonStub; bulkGet: sinon.SinonStub; + bulkUpdate: sinon.SinonStub; delete: sinon.SinonStub; find: sinon.SinonStub; errors: typeof savedObjectsClientErrors; @@ -41,6 +42,7 @@ export function createObjectsClientStub(esDocSource = {}): SavedObjectsClientStu errors: 
savedObjectsClientErrors, bulkCreate: sinon.stub(), bulkGet: sinon.stub(), + bulkUpdate: sinon.stub(), delete: sinon.stub(), find: sinon.stub(), }; diff --git a/src/core/utils/map_utils.test.ts b/src/core/utils/map_utils.test.ts new file mode 100644 index 0000000000000..0d9b2a6129de0 --- /dev/null +++ b/src/core/utils/map_utils.test.ts @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { mapValuesOfMap, groupIntoMap } from './map_utils'; + +describe('groupIntoMap', () => { + it('returns an empty map when there are no items to map', () => { + const groupBy = jest.fn(); + + expect(groupIntoMap([], groupBy)).toEqual(new Map()); + expect(groupBy).not.toHaveBeenCalled(); + }); + + it('calls groupBy for each item in the collection', () => { + const groupBy = jest.fn(); + + groupIntoMap([{ id: 1 }, { id: 2 }, { id: 3 }], groupBy); + + expect(groupBy).toHaveBeenCalledTimes(3); + expect(groupBy).toHaveBeenCalledWith({ id: 1 }); + expect(groupBy).toHaveBeenCalledWith({ id: 2 }); + expect(groupBy).toHaveBeenCalledWith({ id: 3 }); + }); + + it('returns each item in the key returned by groupBy', () => { + const groupBy = (item: { id: number }) => item.id; + + expect(groupIntoMap([{ id: 1 }, { id: 2 }, { id: 3 }], groupBy)).toEqual( + new Map([[1, [{ id: 1 }]], [2, [{ id: 2 }]], [3, [{ id: 3 }]]]) + ); + }); + + it('groups items under the same key returned by groupBy', () => { + const groupBy = (item: { id: number }) => (item.id % 2 === 0 ? 'even' : 'odd'); + + const expectedResult = new Map(); + expectedResult.set('even', [{ id: 2 }]); + expectedResult.set('odd', [{ id: 1 }, { id: 3 }]); + expect(groupIntoMap([{ id: 1 }, { id: 2 }, { id: 3 }], groupBy)).toEqual(expectedResult); + }); + + it('supports Symbols as keys', () => { + const even = Symbol('even'); + const odd = Symbol('odd'); + const groupBy = (item: { id: number }) => (item.id % 2 === 0 ? 
even : odd); + + const expectedResult = new Map(); + expectedResult.set(even, [{ id: 2 }]); + expectedResult.set(odd, [{ id: 1 }, { id: 3 }]); + expect(groupIntoMap([{ id: 1 }, { id: 2 }, { id: 3 }], groupBy)).toEqual(expectedResult); + }); +}); + +describe('mapValuesOfMap', () => { + it('applys the mapper to each value in a map', () => { + const mapper = jest.fn(); + + const even = Symbol('even'); + const odd = Symbol('odd'); + + const map = new Map(); + map.set(even, 2); + map.set(odd, 1); + + mapValuesOfMap(map, mapper); + expect(mapper).toHaveBeenCalledWith(1); + expect(mapper).toHaveBeenCalledWith(2); + }); + + it('returns a new map with each value mapped to the value returned by the mapper', () => { + const mapper = (i: number) => i * 3; + + const even = Symbol('even'); + const odd = Symbol('odd'); + + const map = new Map(); + map.set(even, 2); + map.set(odd, 1); + + expect(mapValuesOfMap(map, mapper)).toEqual(new Map([[even, 6], [odd, 3]])); + expect(map.get(odd)).toEqual(1); + expect(map.get(even)).toEqual(2); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/is_request.js b/src/core/utils/map_utils.ts similarity index 60% rename from src/legacy/ui/public/courier/fetch/is_request.js rename to src/core/utils/map_utils.ts index 73c54d6f4bca1..47a1d6b34b99f 100644 --- a/src/legacy/ui/public/courier/fetch/is_request.js +++ b/src/core/utils/map_utils.ts @@ -17,12 +17,21 @@ * under the License. */ -import { SearchRequestProvider } from './request'; - -export function IsRequestProvider(Private) { - const SearchRequest = Private(SearchRequestProvider); +export function mapValuesOfMap(map: Map, mapper: (item: G) => H): Map { + const result = new Map(); + for (const [key, value] of map.entries()) { + result.set(key, mapper(value)); + } + return result; +} - return function isRequest(obj) { - return obj instanceof SearchRequest; - }; +export function groupIntoMap(collection: T[], groupBy: (item: T) => G): Map { + const map = new Map(); + collection.forEach(item => { + const key = groupBy(item); + const values = map.get(key) || []; + values.push(item); + map.set(key, values); + }); + return map; } diff --git a/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/editor.tsx b/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/editor.tsx index 655834f49ca9f..dd891b42f6856 100644 --- a/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/editor.tsx +++ b/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/editor.tsx @@ -17,7 +17,7 @@ * under the License. 
*/ -import React, { CSSProperties, useEffect, useRef, useState } from 'react'; +import React, { CSSProperties, useCallback, useEffect, useRef, useState } from 'react'; import { EuiToolTip } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; @@ -143,7 +143,7 @@ function _Editor({ previousStateLocation = 'stored' }: EditorProps) { }; }, []); - const sendCurrentRequestToES = () => { + const sendCurrentRequestToES = useCallback(() => { dispatch({ type: 'sendRequestToEs', value: { @@ -153,7 +153,7 @@ function _Editor({ previousStateLocation = 'stored' }: EditorProps) { history.addToHistory(esPath, esMethod, esData), }, }); - }; + }, [settings]); useEffect(() => { applyCurrentSettings(editorInstanceRef.current!, settings); @@ -167,7 +167,7 @@ function _Editor({ previousStateLocation = 'stored' }: EditorProps) { sendCurrentRequestToES, openDocumentation, }); - }, []); + }, [sendCurrentRequestToES]); return (
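The editor.tsx hunk above wraps sendCurrentRequestToES in useCallback and lists it as a dependency of the effect that registers the keyboard shortcuts, so the bound handler is refreshed whenever settings change instead of closing over a stale value. A minimal sketch of that pattern follows; the registerShortcut helper, component, and prop names are assumptions for illustration, not the Console plugin's real wiring.

import React, { useCallback, useEffect, useState } from 'react';

// Hypothetical stand-in for the Console keyboard-shortcut registration;
// it returns an unsubscribe function so the effect below can clean up.
function registerShortcut(handler: () => void): () => void {
  const listener = (e: KeyboardEvent) => {
    if (e.ctrlKey && e.key === 'Enter') {
      handler();
    }
  };
  window.addEventListener('keydown', listener);
  return () => window.removeEventListener('keydown', listener);
}

export function RequestPanel({ settings }: { settings: { autocomplete: boolean } }) {
  const [lastSent, setLastSent] = useState<string | null>(null);

  // Memoized like sendCurrentRequestToES: the function identity only changes
  // when `settings` changes, not on every render.
  const sendCurrentRequest = useCallback(() => {
    setLastSent(`request sent (autocomplete=${settings.autocomplete})`);
  }, [settings]);

  // Re-register the shortcut only when the memoized handler changes, so the
  // registered callback never captures stale settings.
  useEffect(() => registerShortcut(sendCurrentRequest), [sendCurrentRequest]);

  return <div>{lastSent ?? 'Press Ctrl+Enter to send the current request'}</div>;
}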
diff --git a/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/keyboard_shortcuts.ts b/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/keyboard_shortcuts.ts index be1826afd7827..1be571f2739ac 100644 --- a/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/keyboard_shortcuts.ts +++ b/src/legacy/core_plugins/console/np_ready/public/application/containers/editor/legacy/console_editor/keyboard_shortcuts.ts @@ -18,7 +18,7 @@ */ interface Actions { - input: any; + input: any; // TODO: Wrap this in an editor interface sendCurrentRequestToES: () => void; openDocumentation: () => void; } diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.aliases.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.aliases.json index 704cd39816040..2135bd67e57d8 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.aliases.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.aliases.json @@ -3,7 +3,6 @@ "url_params": { "format": "", "local": "__flag__", - "master_timeout": "", "h": [], "help": "__flag__", "s": [], diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.count.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.count.json index 56377699e004e..20d36cc717ed2 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.count.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.count.json @@ -2,8 +2,6 @@ "cat.count": { "url_params": { "format": "", - "local": "__flag__", - "master_timeout": "", "h": [], "help": "__flag__", "s": [], diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.fielddata.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.fielddata.json index 8e7d73333ab32..a3212973e9fc6 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.fielddata.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.fielddata.json @@ -15,8 +15,6 @@ "p", "pb" ], - "local": "__flag__", - "master_timeout": "", "h": [], "help": "__flag__", "s": [], diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.health.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.health.json index 9bf7b17b7a2a9..2b6905cc711e0 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.health.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.health.json @@ -2,11 +2,18 @@ "cat.health": { "url_params": { "format": "", - "local": "__flag__", - "master_timeout": "", "h": [], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "ts": "__flag__", "v": "__flag__" }, diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.indices.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.indices.json index a67ed9aa7282c..45da7f054bfb4 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.indices.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.indices.json @@ -19,6 +19,15 @@ "help": "__flag__", "pri": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__", 
"include_unloaded_segments": "__flag__" }, diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.nodes.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.nodes.json index 650fe122ba548..2e89a66ef1f35 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.nodes.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.nodes.json @@ -1,6 +1,19 @@ { "cat.nodes": { "url_params": { + "bytes": [ + "b", + "k", + "kb", + "m", + "mb", + "g", + "gb", + "t", + "tb", + "p", + "pb" + ], "format": "", "full_id": "__flag__", "local": "__flag__", @@ -8,6 +21,15 @@ "h": [], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.pending_tasks.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.pending_tasks.json index e719875745bdc..4b24db19a50dc 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.pending_tasks.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.pending_tasks.json @@ -7,6 +7,15 @@ "h": [], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.recovery.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.recovery.json index 7a828a3333df3..8db8f0363ff3e 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.recovery.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.recovery.json @@ -17,11 +17,19 @@ "pb" ], "detailed": "__flag__", - "master_timeout": "", "h": [], "help": "__flag__", "index": [], "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.shards.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.shards.json index 767131c0bd009..2b66562e3d6df 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.shards.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.shards.json @@ -20,6 +20,15 @@ "h": [], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.snapshots.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.snapshots.json index ebfa59c43333b..fad1bd25dd647 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.snapshots.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.snapshots.json @@ -7,6 +7,15 @@ "h": [], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.tasks.json b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.tasks.json index 542a66491f9c4..31c987a5893cf 100644 --- a/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.tasks.json +++ b/src/legacy/core_plugins/console/server/api_server/spec/generated/cat.tasks.json @@ -9,6 +9,15 @@ "h": 
[], "help": "__flag__", "s": [], + "time": [ + "d", + "h", + "m", + "s", + "ms", + "micros", + "nanos" + ], "v": "__flag__" }, "methods": [ diff --git a/src/legacy/core_plugins/data/public/filter/apply_filters/apply_filters_popover.tsx b/src/legacy/core_plugins/data/public/filter/apply_filters/apply_filters_popover.tsx index 6270dee72ab05..ab1ed8bfb9841 100644 --- a/src/legacy/core_plugins/data/public/filter/apply_filters/apply_filters_popover.tsx +++ b/src/legacy/core_plugins/data/public/filter/apply_filters/apply_filters_popover.tsx @@ -36,6 +36,7 @@ import React, { Component } from 'react'; import { IndexPattern } from '../../index_patterns'; import { getDisplayValueFromFilter } from '../filter_bar/filter_editor/lib/filter_editor_utils'; import { getFilterDisplayText } from '../filter_bar/filter_editor/lib/get_filter_display_text'; +import { mapAndFlattenFilters } from '../filter_manager/lib/map_and_flatten_filters'; interface Props { filters: Filter[]; @@ -70,9 +71,11 @@ export class ApplyFiltersPopover extends Component { return ''; } + const mappedFilters = mapAndFlattenFilters(this.props.filters); + const form = ( - {this.props.filters.map((filter, i) => ( + {mappedFilters.map((filter, i) => ( extends Component this.updateSuggestions(`${value}`); }; - protected async updateSuggestions(value: string = '') { + protected updateSuggestions = debounce(async (value: string = '') => { const { indexPattern, field } = this.props as PhraseSuggestorProps; if (!field || !this.isSuggestingValues()) { return; @@ -73,7 +74,7 @@ export class PhraseSuggestorUI extends Component this.setState({ isLoading: true }); const suggestions = await this.services.data.getSuggestions(indexPattern.title, field, value); this.setState({ suggestions, isLoading: false }); - } + }, 500); } export const PhraseSuggestor = withKibana(PhraseSuggestorUI); diff --git a/src/legacy/core_plugins/data/public/query/query_bar/components/__snapshots__/query_bar_input.test.tsx.snap b/src/legacy/core_plugins/data/public/query/query_bar/components/__snapshots__/query_bar_input.test.tsx.snap index 92af2127932ba..06f9e6081e522 100644 --- a/src/legacy/core_plugins/data/public/query/query_bar/components/__snapshots__/query_bar_input.test.tsx.snap +++ b/src/legacy/core_plugins/data/public/query/query_bar/components/__snapshots__/query_bar_input.test.tsx.snap @@ -279,6 +279,7 @@ exports[`QueryBarInput Should disable autoFocus on EuiFieldText when disableAuto "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], @@ -824,6 +825,7 @@ exports[`QueryBarInput Should disable autoFocus on EuiFieldText when disableAuto "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], @@ -1357,6 +1359,7 @@ exports[`QueryBarInput Should pass the query language to the language switcher 1 "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], @@ -1899,6 +1902,7 @@ exports[`QueryBarInput Should pass the query language to the language switcher 1 "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], @@ -2432,6 +2436,7 @@ exports[`QueryBarInput Should render 
the given query 1`] = ` "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], @@ -2974,6 +2979,7 @@ exports[`QueryBarInput Should render the given query 1`] = ` "client": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "find": [MockFunction], diff --git a/src/legacy/core_plugins/data/public/shim/apply_filter_directive.html b/src/legacy/core_plugins/data/public/shim/apply_filter_directive.html deleted file mode 100644 index 0b02fcefcdbd5..0000000000000 --- a/src/legacy/core_plugins/data/public/shim/apply_filter_directive.html +++ /dev/null @@ -1,8 +0,0 @@ - diff --git a/src/legacy/core_plugins/data/public/shim/legacy_module.ts b/src/legacy/core_plugins/data/public/shim/legacy_module.ts index 9ce35e6d2fa9e..0b5ca72599208 100644 --- a/src/legacy/core_plugins/data/public/shim/legacy_module.ts +++ b/src/legacy/core_plugins/data/public/shim/legacy_module.ts @@ -20,13 +20,11 @@ import { once } from 'lodash'; import { wrapInI18nContext } from 'ui/i18n'; -import { Filter } from '@kbn/es-query'; // @ts-ignore import { uiModules } from 'ui/modules'; import { npStart } from 'ui/new_platform'; import { FilterBar, ApplyFiltersPopover } from '../filter'; -import template from './apply_filter_directive.html'; // @ts-ignore import { mapAndFlattenFilters } from '../filter/filter_manager/lib/map_and_flatten_filters'; @@ -76,35 +74,53 @@ export const initLegacyModule = once((): void => { ['pluginDataStart', { watchDepth: 'reference' }], ]); }) - .directive('applyFiltersPopoverComponent', (reactDirective: any) => - reactDirective(wrapInI18nContext(ApplyFiltersPopover)) - ) .directive('applyFiltersPopover', () => { return { - template, restrict: 'E', - scope: { - filters: '=', - onCancel: '=', - onSubmit: '=', - indexPatterns: '=', - }, - link($scope: any) { - $scope.state = {}; - - // Each time the new filters change we want to rebuild (not just re-render) the "apply filters" - // popover, because it has to reset its state whenever the new filters change. Setting a `key` - // property on the component accomplishes this due to how React handles the `key` property. - $scope.$watch('filters', (filters: any) => { - const mappedFilters: Filter[] = mapAndFlattenFilters(filters); - $scope.state = { - filters: mappedFilters, - key: Date.now(), - }; - }); + template: '', + compile: (elem: any) => { + const child = document.createElement('apply-filters-popover-helper'); + + // Copy attributes to the child directive + for (const attr of elem[0].attributes) { + child.setAttribute(attr.name, attr.value); + } + + // Add a key attribute that will force a full rerender every time that + // a filter changes. + child.setAttribute('key', 'key'); + + // Append helper directive + elem.append(child); + + const linkFn = ($scope: any, _: any, $attr: any) => { + // Watch only for filter changes to update key. 
+ $scope.$watch( + () => { + return $scope.$eval($attr.filters) || []; + }, + (newVal: any) => { + $scope.key = Date.now(); + }, + true + ); + }; + + return linkFn; }, }; - }); + }) + .directive('applyFiltersPopoverHelper', (reactDirective: any) => + reactDirective(wrapInI18nContext(ApplyFiltersPopover), [ + ['filters', { watchDepth: 'collection' }], + ['onCancel', { watchDepth: 'reference' }], + ['onSubmit', { watchDepth: 'reference' }], + ['indexPatterns', { watchDepth: 'collection' }], + + // Key is needed to trigger a full rerender of the component + 'key', + ]) + ); const module = uiModules.get('kibana/index_patterns'); let _service: any; diff --git a/src/legacy/core_plugins/data/public/timefilter/timefilter.test.ts b/src/legacy/core_plugins/data/public/timefilter/timefilter.test.ts index 0c6b0faf3e397..cca646508b539 100644 --- a/src/legacy/core_plugins/data/public/timefilter/timefilter.test.ts +++ b/src/legacy/core_plugins/data/public/timefilter/timefilter.test.ts @@ -17,6 +17,8 @@ * under the License. */ +jest.useFakeTimers(); + jest.mock('./lib/parse_querystring', () => ({ parseQueryString: () => { return { @@ -124,23 +126,28 @@ describe('setTime', () => { describe('setRefreshInterval', () => { let update: sinon.SinonSpy; let fetch: sinon.SinonSpy; + let autoRefreshFetch: sinon.SinonSpy; let fetchSub: Subscription; let refreshSub: Subscription; + let autoRefreshSub: Subscription; beforeEach(() => { update = sinon.spy(); fetch = sinon.spy(); + autoRefreshFetch = sinon.spy(); timefilter.setRefreshInterval({ pause: false, value: 0, }); refreshSub = timefilter.getRefreshIntervalUpdate$().subscribe(update); fetchSub = timefilter.getFetch$().subscribe(fetch); + autoRefreshSub = timefilter.getAutoRefreshFetch$().subscribe(autoRefreshFetch); }); afterEach(() => { refreshSub.unsubscribe(); fetchSub.unsubscribe(); + autoRefreshSub.unsubscribe(); }); test('should update refresh interval', () => { @@ -214,6 +221,32 @@ describe('setRefreshInterval', () => { expect(update.calledTwice).to.be(true); expect(fetch.calledOnce).to.be(true); }); + + test('should start auto refresh when unpaused', () => { + timefilter.setRefreshInterval({ pause: false, value: 1000 }); + expect(autoRefreshFetch.callCount).to.be(0); + jest.advanceTimersByTime(1000); + expect(autoRefreshFetch.callCount).to.be(1); + jest.advanceTimersByTime(1000); + expect(autoRefreshFetch.callCount).to.be(2); + }); + + test('should stop auto refresh when paused', () => { + timefilter.setRefreshInterval({ pause: true, value: 1000 }); + expect(autoRefreshFetch.callCount).to.be(0); + jest.advanceTimersByTime(1000); + expect(autoRefreshFetch.callCount).to.be(0); + }); + + test('should not keep old interval when updated', () => { + timefilter.setRefreshInterval({ pause: false, value: 1000 }); + expect(autoRefreshFetch.callCount).to.be(0); + jest.advanceTimersByTime(1000); + expect(autoRefreshFetch.callCount).to.be(1); + timefilter.setRefreshInterval({ pause: false, value: 2000 }); + jest.advanceTimersByTime(2000); + expect(autoRefreshFetch.callCount).to.be(2); + }); }); describe('isTimeRangeSelectorEnabled', () => { diff --git a/src/legacy/core_plugins/data/public/timefilter/timefilter.ts b/src/legacy/core_plugins/data/public/timefilter/timefilter.ts index 70889038c0ec2..14e167b0fd56e 100644 --- a/src/legacy/core_plugins/data/public/timefilter/timefilter.ts +++ b/src/legacy/core_plugins/data/public/timefilter/timefilter.ts @@ -34,7 +34,7 @@ export class Timefilter { private timeUpdate$ = new Subject(); // Fired when a user changes the the 
autorefresh settings private refreshIntervalUpdate$ = new Subject(); - // Used when search poll triggers an auto refresh + // Used when an auto refresh is triggered private autoRefreshFetch$ = new Subject(); private fetch$ = new Subject(); @@ -45,6 +45,8 @@ export class Timefilter { private _isTimeRangeSelectorEnabled: boolean = false; private _isAutoRefreshSelectorEnabled: boolean = false; + private _autoRefreshIntervalId: number = 0; + constructor(config: TimefilterConfig, timeHistory: TimeHistoryContract) { this._history = timeHistory; this._time = config.timeDefaults; @@ -142,6 +144,15 @@ export class Timefilter { this.fetch$.next(); } } + + // Clear the previous auto refresh interval and start a new one (if not paused) + clearInterval(this._autoRefreshIntervalId); + if (!newRefreshInterval.pause) { + this._autoRefreshIntervalId = window.setInterval( + () => this.autoRefreshFetch$.next(), + newRefreshInterval.value + ); + } }; public createFilter = (indexPattern: IndexPattern, timeRange?: TimeRange) => { @@ -194,14 +205,6 @@ export class Timefilter { this.enabledUpdated$.next(false); }; - /** - * Added to allow search_poll to trigger an auto refresh event. - * Before this change, search_poll used to access a now private member of this instance. - */ - public notifyShouldFetch = () => { - this.autoRefreshFetch$.next(); - }; - private getForceNow = () => { const forceNow = parseQueryString().forceNow as string; if (!forceNow) { diff --git a/src/legacy/core_plugins/data/public/timefilter/timefilter_service.mock.ts b/src/legacy/core_plugins/data/public/timefilter/timefilter_service.mock.ts index fad4217acd215..2923cee60f898 100644 --- a/src/legacy/core_plugins/data/public/timefilter/timefilter_service.mock.ts +++ b/src/legacy/core_plugins/data/public/timefilter/timefilter_service.mock.ts @@ -41,7 +41,6 @@ const createSetupContractMock = () => { enableTimeRangeSelector: jest.fn(), getBounds: jest.fn(), calculateBounds: jest.fn(), - notifyShouldFetch: jest.fn(), createFilter: jest.fn(), }; diff --git a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js index 7d20d07ba05f6..cd4eac04df0e2 100644 --- a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js +++ b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js @@ -123,12 +123,12 @@ class ListControl extends Control { this.useTimeFilter, ancestorFilters ); - this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued()); + const abortSignal = this.abortController.signal; this.lastQuery = query; let resp; try { - resp = await searchSource.fetch(); + resp = await searchSource.fetch({ abortSignal }); } catch(error) { // If the fetch was aborted then no need to surface this error in the UI if (error.name === 'AbortError') return; diff --git a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js index cb1c3111addf5..7febe228d614c 100644 --- a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js +++ b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js @@ -66,11 +66,11 @@ class RangeControl extends Control { const aggs = minMaxAgg(indexPattern.fields.getByName(fieldName)); const searchSource = createSearchSource(this.kbnApi, null, indexPattern, aggs, this.useTimeFilter); - 
this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued()); + const abortSignal = this.abortController.signal; let resp; try { - resp = await searchSource.fetch(); + resp = await searchSource.fetch({ abortSignal }); } catch(error) { // If the fetch was aborted then no need to surface this error in the UI if (error.name === 'AbortError') return; diff --git a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts index 071861548a055..6fcfde0a5b06b 100644 --- a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts +++ b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts @@ -29,7 +29,6 @@ import chrome from 'ui/chrome'; import { TimeRange } from 'src/plugins/data/public'; import { SearchSource } from '../../../../ui/public/courier/search_source'; // @ts-ignore -import { SearchSourceProvider } from '../../../../ui/public/courier/search_source'; import { FilterBarQueryFilterProvider } from '../../../../ui/public/filter_manager/query_filter'; import { buildTabularInspectorData } from '../../../../ui/public/inspector/build_tabular_inspector_data'; @@ -100,8 +99,8 @@ const handleCourierRequest = async ({ return aggs.toDsl(metricsAtAllLevels); }); - requestSearchSource.onRequestStart((paramSearchSource: SearchSource, searchRequest: unknown) => { - return aggs.onSearchRequestStart(paramSearchSource, searchRequest); + requestSearchSource.onRequestStart((paramSearchSource: SearchSource, options: any) => { + return aggs.onSearchRequestStart(paramSearchSource, options); }); if (timeRange) { @@ -118,7 +117,7 @@ const handleCourierRequest = async ({ const queryHash = calculateObjectHash(reqBody); // We only need to reexecute the query, if forceFetch was true or the hash of the request body has changed // since the last request - const shouldQuery = forceFetch || searchSource.lastQuery !== queryHash; + const shouldQuery = forceFetch || (searchSource as any).lastQuery !== queryHash; if (shouldQuery) { inspectorAdapters.requests.reset(); @@ -139,18 +138,13 @@ const handleCourierRequest = async ({ request.stats(getRequestInspectorStats(requestSearchSource)); try { - // Abort any in-progress requests before fetching again - if (abortSignal) { - abortSignal.addEventListener('abort', () => requestSearchSource.cancelQueued()); - } - - const response = await requestSearchSource.fetch(); + const response = await requestSearchSource.fetch({ abortSignal }); - searchSource.lastQuery = queryHash; + (searchSource as any).lastQuery = queryHash; request.stats(getResponseInspectorStats(searchSource, response)).ok({ json: response }); - searchSource.rawResponse = response; + (searchSource as any).rawResponse = response; } catch (e) { // Log any error during request to the inspector request.error({ json: e }); @@ -166,7 +160,7 @@ const handleCourierRequest = async ({ // Note that rawResponse is not deeply cloned here, so downstream applications using courier // must take care not to mutate it, or it could have unintended side effects, e.g. displaying // response data incorrectly in the inspector. - let resp = searchSource.rawResponse; + let resp = (searchSource as any).rawResponse; for (const agg of aggs.aggs) { if (has(agg, 'type.postFlightRequest')) { resp = await agg.type.postFlightRequest( @@ -180,7 +174,7 @@ const handleCourierRequest = async ({ } } - searchSource.finalResponse = resp; + (searchSource as any).finalResponse = resp; const parsedTimeRange = timeRange ? 
getTime(aggs.indexPattern, timeRange) : null; const tabifyParams = { @@ -191,23 +185,24 @@ const handleCourierRequest = async ({ const tabifyCacheHash = calculateObjectHash({ tabifyAggs: aggs, ...tabifyParams }); // We only need to reexecute tabify, if either we did a new request or some input params to tabify changed - const shouldCalculateNewTabify = shouldQuery || searchSource.lastTabifyHash !== tabifyCacheHash; + const shouldCalculateNewTabify = + shouldQuery || (searchSource as any).lastTabifyHash !== tabifyCacheHash; if (shouldCalculateNewTabify) { - searchSource.lastTabifyHash = tabifyCacheHash; - searchSource.tabifiedResponse = tabifyAggResponse( + (searchSource as any).lastTabifyHash = tabifyCacheHash; + (searchSource as any).tabifiedResponse = tabifyAggResponse( aggs, - searchSource.finalResponse, + (searchSource as any).finalResponse, tabifyParams ); } inspectorAdapters.data.setTabularLoader( - () => buildTabularInspectorData(searchSource.tabifiedResponse, queryFilter), + () => buildTabularInspectorData((searchSource as any).tabifiedResponse, queryFilter), { returnsFormattedValues: true } ); - return searchSource.tabifiedResponse; + return (searchSource as any).tabifiedResponse; }; export const esaggs = (): ExpressionFunction => ({ @@ -249,7 +244,6 @@ export const esaggs = (): ExpressionFunction ({ const { visData, visConfig, params } = config; const visType = config.visType || visConfig.type; const $injector = await chrome.dangerouslyGetActiveInjector(); + const $rootScope = $injector.get('$rootScope') as any; const Private = $injector.get('Private') as any; const Vis = Private(VisProvider); if (handlers.vis) { // special case in visualize, we need to render first (without executing the expression), for maps to work if (visConfig) { - handlers.vis.setCurrentState({ type: visType, params: visConfig }); + $rootScope.$apply(() => { + handlers.vis.setCurrentState({ type: visType, params: visConfig }); + }); } } else { handlers.vis = new Vis({ diff --git a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts index f9f5cfe0214b2..01468eadffb84 100644 --- a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts +++ b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts @@ -17,6 +17,7 @@ * under the License. 
*/ +import { searchSourceMock } from '../../../../../ui/public/courier/search_source/mocks'; import { SavedObjectDashboard } from '../saved_dashboard/saved_dashboard'; export function getSavedDashboardMock( @@ -26,10 +27,7 @@ export function getSavedDashboardMock( id: '123', title: 'my dashboard', panelsJSON: '[]', - searchSource: { - getOwnField: (param: any) => param, - setField: () => {}, - }, + searchSource: searchSourceMock, copyOnSave: false, timeRestore: false, timeTo: 'now', diff --git a/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app.tsx b/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app.tsx index 1f65ccebb67d9..7a0398e86a60d 100644 --- a/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app.tsx +++ b/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app.tsx @@ -96,20 +96,13 @@ export interface DashboardAppScope extends ng.IScope { timefilterSubscriptions$: Subscription; } -const app = uiModules.get('app/dashboard', [ - 'elasticsearch', - 'ngRoute', - 'react', - 'kibana/courier', - 'kibana/config', -]); +const app = uiModules.get('app/dashboard', ['elasticsearch', 'ngRoute', 'react', 'kibana/config']); app.directive('dashboardApp', function($injector: IInjector) { const AppState = $injector.get>('AppState'); const kbnUrl = $injector.get('kbnUrl'); const confirmModal = $injector.get('confirmModal'); const config = $injector.get('config'); - const courier = $injector.get<{ fetch: () => void }>('courier'); const Private = $injector.get('Private'); @@ -149,7 +142,6 @@ app.directive('dashboardApp', function($injector: IInjector) { indexPatterns, config, confirmModal, - courier, }), }; }); diff --git a/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app_controller.tsx b/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app_controller.tsx index eb49277fbc7b4..abf7b22a6e48c 100644 --- a/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app_controller.tsx +++ b/src/legacy/core_plugins/kibana/public/dashboard/dashboard_app_controller.tsx @@ -110,9 +110,7 @@ export class DashboardAppController { indexPatterns, config, confirmModal, - courier, }: { - courier: { fetch: () => void }; $scope: DashboardAppScope; $route: any; $routeParams: any; @@ -424,19 +422,21 @@ export class DashboardAppController { }; $scope.onApplyFilters = filters => { - // All filters originated from one visualization. - const indexPatternId = filters[0].meta.index; - const indexPattern = _.find( - $scope.indexPatterns, - (p: IndexPattern) => p.id === indexPatternId - ); - if (indexPattern && indexPattern.timeFieldName) { - const { timeRangeFilter, restOfFilters } = extractTimeFilter( - indexPattern.timeFieldName, - filters + if (filters.length) { + // All filters originated from one visualization. 
+ const indexPatternId = filters[0].meta.index; + const indexPattern = _.find( + $scope.indexPatterns, + (p: IndexPattern) => p.id === indexPatternId ); - queryFilter.addFilters(restOfFilters); - if (timeRangeFilter) changeTimeFilter(timefilter, timeRangeFilter); + if (indexPattern && indexPattern.timeFieldName) { + const { timeRangeFilter, restOfFilters } = extractTimeFilter( + indexPattern.timeFieldName, + filters + ); + queryFilter.addFilters(restOfFilters); + if (timeRangeFilter) changeTimeFilter(timefilter, timeRangeFilter); + } } $scope.appState.$newFilters = []; diff --git a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html index e9424534cd9d2..6c076092c76d5 100644 --- a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html +++ b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html @@ -1,9 +1,9 @@ -
+ diff --git a/src/legacy/core_plugins/kibana/public/discover/angular/discover.js b/src/legacy/core_plugins/kibana/public/discover/angular/discover.js index e517b2a02a31c..840152fc40ced 100644 --- a/src/legacy/core_plugins/kibana/public/discover/angular/discover.js +++ b/src/legacy/core_plugins/kibana/public/discover/angular/discover.js @@ -85,7 +85,6 @@ const fetchStatuses = { }; const app = uiModules.get('apps/discover', [ - 'kibana/courier', 'kibana/url', 'kibana/index_patterns' ]); @@ -185,7 +184,6 @@ function discoverController( Private, Promise, config, - courier, kbnUrl, localStorage, uiCapabilities @@ -232,7 +230,10 @@ function discoverController( // the saved savedSearch const savedSearch = $route.current.locals.savedSearch; + + let abortController; $scope.$on('$destroy', () => { + if (abortController) abortController.abort(); savedSearch.destroy(); subscriptions.unsubscribe(); }); @@ -755,7 +756,8 @@ function discoverController( $scope.updateTime(); // Abort any in-progress requests before fetching again - $scope.searchSource.cancelQueued(); + if (abortController) abortController.abort(); + abortController = new AbortController(); $scope.updateDataSource() .then(setupVisualization) @@ -763,7 +765,9 @@ function discoverController( $state.save(); $scope.fetchStatus = fetchStatuses.LOADING; logInspectorRequest(); - return $scope.searchSource.fetch(); + return $scope.searchSource.fetch({ + abortSignal: abortController.signal + }); }) .then(onResults) .catch((error) => { @@ -1042,8 +1046,8 @@ function discoverController( ); visSavedObject.vis = $scope.vis; - $scope.searchSource.onRequestStart((searchSource, searchRequest) => { - return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, searchRequest); + $scope.searchSource.onRequestStart((searchSource, options) => { + return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, options); }); $scope.searchSource.setField('aggs', function () { diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/_stubs.js b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/_stubs.js index b93cc8e936fd3..ecb22b20e4d86 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/_stubs.js +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/_stubs.js @@ -19,6 +19,7 @@ import sinon from 'sinon'; import moment from 'moment'; +import { SearchSource } from 'ui/courier'; export function createIndexPatternsStub() { return { @@ -31,7 +32,10 @@ export function createIndexPatternsStub() { }; } -export function createSearchSourceStubProvider(hits, timeField) { +/** + * A stubbed search source with a `fetch` method that returns all of `_stubHits`. + */ +export function createSearchSourceStub(hits, timeField) { const searchSourceStub = { _stubHits: hits, _stubTimeField: timeField, @@ -41,13 +45,37 @@ export function createSearchSourceStubProvider(hits, timeField) { }), }; - searchSourceStub.setParent = sinon.stub().returns(searchSourceStub); - searchSourceStub.setField = sinon.stub().returns(searchSourceStub); - searchSourceStub.getField = sinon.spy(key => { + searchSourceStub.setParent = sinon.stub(SearchSource.prototype, 'setParent').returns(searchSourceStub); + searchSourceStub.setField = sinon.stub(SearchSource.prototype, 'setField').returns(searchSourceStub); + searchSourceStub.getField = sinon.stub(SearchSource.prototype, 'getField').callsFake(key => { const previousSetCall = searchSourceStub.setField.withArgs(key).lastCall; return previousSetCall ? 
previousSetCall.args[1] : null; }); - searchSourceStub.fetch = sinon.spy(() => { + searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => Promise.resolve({ + hits: { + hits: searchSourceStub._stubHits, + total: searchSourceStub._stubHits.length, + }, + })); + + searchSourceStub._restore = () => { + searchSourceStub.setParent.restore(); + searchSourceStub.setField.restore(); + searchSourceStub.getField.restore(); + searchSourceStub.fetch.restore(); + }; + + return searchSourceStub; +} + +/** + * A stubbed search source with a `fetch` method that returns a filtered set of `_stubHits`. + */ +export function createContextSearchSourceStub(hits, timeField = '@timestamp') { + const searchSourceStub = createSearchSourceStub(hits, timeField); + + searchSourceStub.fetch.restore(); + searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => { const timeField = searchSourceStub._stubTimeField; const lastQuery = searchSourceStub.setField.withArgs('query').lastCall.args[1]; const timeRange = lastQuery.query.constant_score.filter.range[timeField]; @@ -71,7 +99,5 @@ export function createSearchSourceStubProvider(hits, timeField) { }); }); - return function SearchSourceStubProvider() { - return searchSourceStub; - }; + return searchSourceStub; } diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/anchor.js b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/anchor.js index 582de1c8fa74c..46e66177b516a 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/anchor.js +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/anchor.js @@ -19,55 +19,34 @@ import expect from '@kbn/expect'; import ngMock from 'ng_mock'; -import sinon from 'sinon'; -import { createIndexPatternsStub } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createSearchSourceStub } from './_stubs'; import { fetchAnchorProvider } from '../anchor'; -function createSearchSourceStubProvider(hits) { - const searchSourceStub = { - _stubHits: hits, - }; - - searchSourceStub.setParent = sinon.stub().returns(searchSourceStub); - searchSourceStub.setField = sinon.stub().returns(searchSourceStub); - searchSourceStub.fetch = sinon.spy(() => Promise.resolve({ - hits: { - hits: searchSourceStub._stubHits, - total: searchSourceStub._stubHits.length, - }, - })); - - return function SearchSourceStubProvider() { - return searchSourceStub; - }; -} - describe('context app', function () { beforeEach(ngMock.module('kibana')); describe('function fetchAnchor', function () { let fetchAnchor; - let SearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - SearchSourceStub = createSearchSourceStubProvider([ + searchSourceStub = createSearchSourceStub([ { _id: 'hit1' }, ]); - Private.stub(SearchSourceProvider, SearchSourceStub); - fetchAnchor = Private(fetchAnchorProvider); })); - it('should use the `fetch` method of the SearchSource', function () { - const searchSourceStub = new SearchSourceStub(); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should use the `fetch` method of the SearchSource', function () { return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { expect(searchSourceStub.fetch.calledOnce).to.be(true); @@ 
-75,8 +54,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setParentSpy = searchSourceStub.setParent; @@ -86,8 +63,6 @@ describe('context app', function () { }); it('should set the SearchSource index pattern', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setFieldSpy = searchSourceStub.setField; @@ -96,8 +71,6 @@ describe('context app', function () { }); it('should set the SearchSource version flag to true', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setVersionSpy = searchSourceStub.setField.withArgs('version'); @@ -107,8 +80,6 @@ describe('context app', function () { }); it('should set the SearchSource size to 1', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setSizeSpy = searchSourceStub.setField.withArgs('size'); @@ -118,8 +89,6 @@ describe('context app', function () { }); it('should set the SearchSource query to an ids query', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setQuerySpy = searchSourceStub.setField.withArgs('query'); @@ -140,8 +109,6 @@ describe('context app', function () { }); it('should set the SearchSource sort order', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setSortSpy = searchSourceStub.setField.withArgs('sort'); @@ -154,7 +121,6 @@ describe('context app', function () { }); it('should reject with an error when no hits were found', function () { - const searchSourceStub = new SearchSourceStub(); searchSourceStub._stubHits = []; return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) @@ -169,7 +135,6 @@ describe('context app', function () { }); it('should return the first hit after adding an anchor marker', function () { - const searchSourceStub = new SearchSourceStub(); searchSourceStub._stubHits = [ { property1: 'value1' }, { property2: 'value2' }, diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/predecessors.js b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/predecessors.js index 88efc8efc5d30..2bf3da42e24e5 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/predecessors.js +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/predecessors.js @@ -22,8 +22,7 @@ import ngMock from 'ng_mock'; import moment from 'moment'; import * as _ from 'lodash'; -import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs'; import { fetchContextProvider } from '../context'; @@ -38,16 +37,14 @@ describe('context app', function () { describe('function fetchPredecessors', 
function () { let fetchPredecessors; - let getSearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp', MS_PER_DAY * 8); - Private.stub(SearchSourceProvider, getSearchSourceStub); - + searchSourceStub = createContextSearchSourceStub([], '@timestamp', MS_PER_DAY * 8); fetchPredecessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => { const anchor = { _source: { @@ -69,8 +66,11 @@ describe('context app', function () { }; })); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should perform exactly one query when enough hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 2), searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 1), @@ -97,7 +97,6 @@ describe('context app', function () { }); it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3010), searchSourceStub._createStubHit(MS_PER_DAY * 3002), @@ -134,7 +133,6 @@ describe('context app', function () { }); it('should perform multiple queries until the expected hit count is returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 1700), searchSourceStub._createStubHit(MS_PER_DAY * 1200), @@ -185,8 +183,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchPredecessors( 'INDEX_PATTERN_ID', '@timestamp', @@ -206,8 +202,6 @@ describe('context app', function () { }); it('should set the tiebreaker sort order to the opposite as the time field', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchPredecessors( 'INDEX_PATTERN_ID', '@timestamp', diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/successors.js b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/successors.js index 57f7673d31183..b8bec40f2859c 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/successors.js +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/__tests__/successors.js @@ -22,8 +22,7 @@ import ngMock from 'ng_mock'; import moment from 'moment'; import * as _ from 'lodash'; -import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs'; import { fetchContextProvider } from '../context'; @@ -37,15 +36,14 @@ describe('context app', function () { describe('function fetchSuccessors', function () { let fetchSuccessors; - let getSearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp'); - 
Private.stub(SearchSourceProvider, getSearchSourceStub); + searchSourceStub = createContextSearchSourceStub([], '@timestamp'); fetchSuccessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => { const anchor = { @@ -68,8 +66,11 @@ describe('context app', function () { }; })); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should perform exactly one query when enough hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 5000), searchSourceStub._createStubHit(MS_PER_DAY * 4000), @@ -96,7 +97,6 @@ describe('context app', function () { }); it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3010), searchSourceStub._createStubHit(MS_PER_DAY * 3002), @@ -133,7 +133,6 @@ describe('context app', function () { }); it('should perform multiple queries until the expected hit count is returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3000), searchSourceStub._createStubHit(MS_PER_DAY * 3000 - 1), @@ -187,8 +186,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchSuccessors( 'INDEX_PATTERN_ID', '@timestamp', @@ -208,8 +205,6 @@ describe('context app', function () { }); it('should set the tiebreaker sort order to the same as the time field', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchSuccessors( 'INDEX_PATTERN_ID', '@timestamp', diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/anchor.js b/src/legacy/core_plugins/kibana/public/discover/context/api/anchor.js index bab75e14e8ed3..02a309eaa0165 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/anchor.js +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/anchor.js @@ -21,11 +21,9 @@ import _ from 'lodash'; import { i18n } from '@kbn/i18n'; -import { SearchSourceProvider } from 'ui/courier'; - -export function fetchAnchorProvider(indexPatterns, Private) { - const SearchSource = Private(SearchSourceProvider); +import { SearchSource } from 'ui/courier'; +export function fetchAnchorProvider(indexPatterns) { return async function fetchAnchor( indexPatternId, anchorId, diff --git a/src/legacy/core_plugins/kibana/public/discover/context/api/context.ts b/src/legacy/core_plugins/kibana/public/discover/context/api/context.ts index 39c7421d3b912..48ac59f1f0855 100644 --- a/src/legacy/core_plugins/kibana/public/discover/context/api/context.ts +++ b/src/legacy/core_plugins/kibana/public/discover/context/api/context.ts @@ -18,8 +18,7 @@ */ // @ts-ignore -import { SearchSourceProvider } from 'ui/courier'; -import { IPrivate } from 'ui/private'; +import { SearchSource } from 'ui/courier'; import { Filter } from '@kbn/es-query'; import { IndexPatterns, IndexPattern } from 'ui/index_patterns'; import { reverseSortDir, SortDirection } from './utils/sorting'; @@ -42,9 +41,7 @@ const DAY_MILLIS = 24 * 60 * 60 * 1000; // look from 1 day up to 10000 days into the past and future const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000].map(days => days * DAY_MILLIS); -function 
fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) { - const SearchSourcePrivate: any = Private(SearchSourceProvider); - +function fetchContextProvider(indexPatterns: IndexPatterns) { return { fetchSurroundingDocs, }; @@ -116,7 +113,7 @@ function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) { } async function createSearchSource(indexPattern: IndexPattern, filters: Filter[]) { - return new SearchSourcePrivate() + return new SearchSource() .setParent(false) .setField('index', indexPattern) .setField('filter', filters); diff --git a/src/legacy/core_plugins/kibana/public/discover/doc_table/__tests__/actions/filter.js b/src/legacy/core_plugins/kibana/public/discover/doc_table/__tests__/actions/filter.js index d2bdec4fef5b0..1f5db791469b9 100644 --- a/src/legacy/core_plugins/kibana/public/discover/doc_table/__tests__/actions/filter.js +++ b/src/legacy/core_plugins/kibana/public/discover/doc_table/__tests__/actions/filter.js @@ -38,7 +38,6 @@ describe('doc table filter actions', function () { beforeEach(ngMock.module( 'kibana', - 'kibana/courier', function ($provide) { $provide.service('indexPatterns', require('fixtures/mock_index_patterns')); } diff --git a/src/legacy/core_plugins/kibana/public/discover/doc_viewer/doc_viewer_directive.ts b/src/legacy/core_plugins/kibana/public/discover/doc_viewer/doc_viewer_directive.ts index 202fca6ee7b52..fa6145c45f55f 100644 --- a/src/legacy/core_plugins/kibana/public/discover/doc_viewer/doc_viewer_directive.ts +++ b/src/legacy/core_plugins/kibana/public/discover/doc_viewer/doc_viewer_directive.ts @@ -22,15 +22,26 @@ import { uiModules } from 'ui/modules'; import { DocViewer } from './doc_viewer'; uiModules.get('apps/discover').directive('docViewer', (reactDirective: any) => { - return reactDirective(DocViewer, undefined, { - restrict: 'E', - scope: { - hit: '=', - indexPattern: '=', - filter: '=?', - columns: '=?', - onAddColumn: '=?', - onRemoveColumn: '=?', - }, - }); + return reactDirective( + DocViewer, + [ + 'hit', + ['indexPattern', { watchDepth: 'reference' }], + ['filter', { watchDepth: 'reference' }], + ['columns', { watchDepth: 'collection' }], + ['onAddColumn', { watchDepth: 'reference' }], + ['onRemoveColumn', { watchDepth: 'reference' }], + ], + { + restrict: 'E', + scope: { + hit: '=', + indexPattern: '=', + filter: '=?', + columns: '=?', + onAddColumn: '=?', + onRemoveColumn: '=?', + }, + } + ); }); diff --git a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts index d5bf868f3bf72..eaec11ff893ed 100644 --- a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts +++ b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts @@ -102,12 +102,13 @@ export class SearchEmbeddable extends Embeddable private inspectorAdaptors: Adapters; private searchScope?: SearchScope; private panelTitle: string = ''; - private filtersSearchSource: SearchSource; + private filtersSearchSource?: SearchSource; private searchInstance?: JQLite; private autoRefreshFetchSubscription?: Subscription; private subscription?: Subscription; public readonly type = SEARCH_EMBEDDABLE_TYPE; private filterGen: FilterManager; + private abortController?: AbortController; private prevTimeRange?: TimeRange; private prevFilters?: Filter[]; @@ -193,7 +194,7 @@ export class SearchEmbeddable extends Embeddable if (this.autoRefreshFetchSubscription) { 
this.autoRefreshFetchSubscription.unsubscribe(); } - this.savedSearch.searchSource.cancelQueued(); + if (this.abortController) this.abortController.abort(); } private initializeSearchScope() { @@ -273,7 +274,8 @@ export class SearchEmbeddable extends Embeddable const { searchSource } = this.savedSearch; // Abort any in-progress requests - searchSource.cancelQueued(); + if (this.abortController) this.abortController.abort(); + this.abortController = new AbortController(); searchSource.setField('size', config.get('discover:sampleSize')); searchSource.setField( @@ -299,7 +301,9 @@ export class SearchEmbeddable extends Embeddable try { // Make the request - const resp = await searchSource.fetch(); + const resp = await searchSource.fetch({ + abortSignal: this.abortController.signal, + }); this.searchScope.isLoading = false; @@ -337,8 +341,8 @@ export class SearchEmbeddable extends Embeddable searchScope.sharedItemTitle = this.panelTitle; if (isFetchRequired) { - this.filtersSearchSource.setField('filter', this.input.filters); - this.filtersSearchSource.setField('query', this.input.query); + this.filtersSearchSource!.setField('filter', this.input.filters); + this.filtersSearchSource!.setField('query', this.input.query); this.fetch(); diff --git a/src/legacy/core_plugins/kibana/public/discover/saved_searches/_saved_search.js b/src/legacy/core_plugins/kibana/public/discover/saved_searches/_saved_search.js index eed6bcad0ec5d..3903dc0845450 100644 --- a/src/legacy/core_plugins/kibana/public/discover/saved_searches/_saved_search.js +++ b/src/legacy/core_plugins/kibana/public/discover/saved_searches/_saved_search.js @@ -22,9 +22,7 @@ import { uiModules } from 'ui/modules'; import { createLegacyClass } from 'ui/utils/legacy_class'; import { SavedObjectProvider } from 'ui/saved_objects/saved_object'; -const module = uiModules.get('discover/saved_searches', [ - 'kibana/courier' -]); +const module = uiModules.get('discover/saved_searches', []); module.factory('SavedSearch', function (Private) { const SavedObject = Private(SavedObjectProvider); diff --git a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js index 1eb56403d3a78..6ae84b9c641c2 100644 --- a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js +++ b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js @@ -27,7 +27,7 @@ import { fatalError, toastNotifications } from 'ui/notify'; import uiRoutes from 'ui/routes'; import { uiModules } from 'ui/modules'; import template from './edit_index_pattern.html'; -import { FieldWildcardProvider } from 'ui/field_wildcard'; +import { fieldWildcardMatcher } from 'ui/field_wildcard'; import { IndexPatternListFactory } from 'ui/management/index_pattern_list'; import React from 'react'; import { render, unmountComponentAtNode } from 'react-dom'; @@ -173,10 +173,9 @@ uiModules.get('apps/management') .controller('managementIndexPatternsEdit', function ( $scope, $location, $route, Promise, config, indexPatterns, Private, AppState, confirmModal) { const $state = $scope.state = new AppState(); - const { fieldWildcardMatcher } = Private(FieldWildcardProvider); const indexPatternListProvider = Private(IndexPatternListFactory)(); - $scope.fieldWildcardMatcher = fieldWildcardMatcher; + $scope.fieldWildcardMatcher = (...args) 
=> fieldWildcardMatcher(...args, config.get('metaFields')); $scope.editSectionsProvider = Private(IndicesEditSectionsProvider); $scope.kbnUrl = Private(KbnUrlProvider); $scope.indexPattern = $route.current.locals.indexPattern; diff --git a/src/legacy/core_plugins/kibana/public/management/sections/objects/components/objects_table/components/flyout/__jest__/__snapshots__/flyout.test.js.snap b/src/legacy/core_plugins/kibana/public/management/sections/objects/components/objects_table/components/flyout/__jest__/__snapshots__/flyout.test.js.snap index 6f48f0d0eb2f1..a9175e7b2a63e 100644 --- a/src/legacy/core_plugins/kibana/public/management/sections/objects/components/objects_table/components/flyout/__jest__/__snapshots__/flyout.test.js.snap +++ b/src/legacy/core_plugins/kibana/public/management/sections/objects/components/objects_table/components/flyout/__jest__/__snapshots__/flyout.test.js.snap @@ -31,6 +31,7 @@ exports[`Flyout conflicts should allow conflict resolution 1`] = ` /> ,

{props.promotedTypes.map(t => ( - <> +

{t.promotion!.description}

@@ -50,7 +50,7 @@ export function NewVisHelp(props: Props) { > {t.promotion!.buttonText} - +
))} ); diff --git a/src/legacy/core_plugins/kibana/server/lib/export/collect_references_deep.test.ts b/src/legacy/core_plugins/kibana/server/lib/export/collect_references_deep.test.ts index 89a7e2bc01818..b5e112a489ce4 100644 --- a/src/legacy/core_plugins/kibana/server/lib/export/collect_references_deep.test.ts +++ b/src/legacy/core_plugins/kibana/server/lib/export/collect_references_deep.test.ts @@ -17,10 +17,11 @@ * under the License. */ -import { SavedObject, SavedObjectsClient } from 'src/core/server'; +import { SavedObject, SavedObjectAttributes } from 'src/core/server'; import { collectReferencesDeep } from './collect_references_deep'; +import { SavedObjectsClientMock } from '../../../../../../core/server/mocks'; -const data = [ +const data: Array> = [ { id: '1', type: 'dashboard', @@ -78,6 +79,7 @@ const data = [ attributes: { title: 'pattern*', }, + references: [], }, { id: '5', @@ -100,97 +102,93 @@ const data = [ ]; test('collects dashboard and all dependencies', async () => { - const savedObjectClient = ({ - errors: {} as any, - create: jest.fn(), - bulkCreate: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - bulkGet: jest.fn(getObjects => { - return { - saved_objects: getObjects.map((obj: SavedObject) => - data.find(row => row.id === obj.id && row.type === obj.type) - ), - }; - }), - } as unknown) as SavedObjectsClient; + const savedObjectClient = SavedObjectsClientMock.create(); + savedObjectClient.bulkGet.mockImplementation(objects => { + if (!objects) { + throw new Error('Invalid test data'); + } + return Promise.resolve({ + saved_objects: objects.map( + (obj: any) => data.find(row => row.id === obj.id && row.type === obj.type)! + ), + }); + }); const objects = await collectReferencesDeep(savedObjectClient, [{ type: 'dashboard', id: '1' }]); expect(objects).toMatchInlineSnapshot(` -Array [ - Object { - "attributes": Object { - "panelsJSON": "[{\\"panelRefName\\":\\"panel_0\\"},{\\"panelRefName\\":\\"panel_1\\"}]", - }, - "id": "1", - "references": Array [ + Array [ + Object { + "attributes": Object { + "panelsJSON": "[{\\"panelRefName\\":\\"panel_0\\"},{\\"panelRefName\\":\\"panel_1\\"}]", + }, + "id": "1", + "references": Array [ + Object { + "id": "2", + "name": "panel_0", + "type": "visualization", + }, + Object { + "id": "3", + "name": "panel_1", + "type": "visualization", + }, + ], + "type": "dashboard", + }, Object { + "attributes": Object { + "kibanaSavedObjectMeta": Object { + "searchSourceJSON": "{\\"indexRefName\\":\\"kibanaSavedObjectMeta.searchSourceJSON.index\\"}", + }, + }, "id": "2", - "name": "panel_0", + "references": Array [ + Object { + "id": "4", + "name": "kibanaSavedObjectMeta.searchSourceJSON.index", + "type": "index-pattern", + }, + ], "type": "visualization", }, Object { + "attributes": Object { + "savedSearchRefName": "search_0", + }, "id": "3", - "name": "panel_1", + "references": Array [ + Object { + "id": "5", + "name": "search_0", + "type": "search", + }, + ], "type": "visualization", }, - ], - "type": "dashboard", - }, - Object { - "attributes": Object { - "kibanaSavedObjectMeta": Object { - "searchSourceJSON": "{\\"indexRefName\\":\\"kibanaSavedObjectMeta.searchSourceJSON.index\\"}", - }, - }, - "id": "2", - "references": Array [ Object { + "attributes": Object { + "title": "pattern*", + }, "id": "4", - "name": "kibanaSavedObjectMeta.searchSourceJSON.index", + "references": Array [], "type": "index-pattern", }, - ], - "type": "visualization", - }, - Object { - "attributes": Object { - 
"savedSearchRefName": "search_0", - }, - "id": "3", - "references": Array [ Object { + "attributes": Object { + "kibanaSavedObjectMeta": Object { + "searchSourceJSON": "{\\"indexRefName\\":\\"kibanaSavedObjectMeta.searchSourceJSON.index\\"}", + }, + }, "id": "5", - "name": "search_0", + "references": Array [ + Object { + "id": "4", + "name": "kibanaSavedObjectMeta.searchSourceJSON.index", + "type": "index-pattern", + }, + ], "type": "search", }, - ], - "type": "visualization", - }, - Object { - "attributes": Object { - "title": "pattern*", - }, - "id": "4", - "type": "index-pattern", - }, - Object { - "attributes": Object { - "kibanaSavedObjectMeta": Object { - "searchSourceJSON": "{\\"indexRefName\\":\\"kibanaSavedObjectMeta.searchSourceJSON.index\\"}", - }, - }, - "id": "5", - "references": Array [ - Object { - "id": "4", - "name": "kibanaSavedObjectMeta.searchSourceJSON.index", - "type": "index-pattern", - }, - ], - "type": "search", - }, -] -`); + ] + `); }); diff --git a/src/legacy/core_plugins/vis_type_vega/public/_vega_editor.scss b/src/legacy/core_plugins/vis_type_vega/public/_vega_editor.scss index 94ba9a9c6bc43..f4276541d5d9e 100644 --- a/src/legacy/core_plugins/vis_type_vega/public/_vega_editor.scss +++ b/src/legacy/core_plugins/vis_type_vega/public/_vega_editor.scss @@ -1,41 +1,22 @@ .visEditor--vega { .visEditorSidebar__config { padding: 0; - // Makes sure the vega options dropdown menu is visible - overflow: inherit; + position: relative; } - // The following is necessary for the Vega editor to expand to full height of the editor panel - .visEditorSidebar__config, .visEditorSidebar__options { - @include flex-parent(1, 1, auto); - - > * { - @include flex-parent(1, 1, auto); - } - } - - @include euiBreakpoint('xs', 's', 'm') { - .visEditor__collapsibleSidebar { - flex-grow: 1; - } + @include euiScrollBar; + flex-shrink: 1; + overflow-y: auto; } } - .vgaEditor { - @include flex-parent(1, 1, auto); - position: relative; - @include euiBreakpoint('xs', 's', 'm') { - min-height: $euiSize * 15; + @include euiScrollBar; + max-height: $euiSize * 15; + overflow-y: auto; } - - position: relative; -} - -.vgaEditor__aceEditor { - flex: 1 1 auto; } .vgaEditor__aceEditorActions { diff --git a/src/legacy/ui/public/courier/search_poll/index.js b/src/legacy/core_plugins/vis_type_vega/public/components/index.ts similarity index 93% rename from src/legacy/ui/public/courier/search_poll/index.js rename to src/legacy/core_plugins/vis_type_vega/public/components/index.ts index 24813e3b58ea1..90f067c778fd2 100644 --- a/src/legacy/ui/public/courier/search_poll/index.js +++ b/src/legacy/core_plugins/vis_type_vega/public/components/index.ts @@ -17,4 +17,4 @@ * under the License. */ -export { SearchPollProvider } from './search_poll'; +export { VegaVisEditor } from './vega_vis_editor'; diff --git a/src/legacy/core_plugins/vis_type_vega/public/components/vega_actions_menu.tsx b/src/legacy/core_plugins/vis_type_vega/public/components/vega_actions_menu.tsx new file mode 100644 index 0000000000000..71a88b47a8be3 --- /dev/null +++ b/src/legacy/core_plugins/vis_type_vega/public/components/vega_actions_menu.tsx @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { useState, useCallback } from 'react'; +import { EuiButtonIcon, EuiContextMenuPanel, EuiContextMenuItem, EuiPopover } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { i18n } from '@kbn/i18n'; + +interface VegaActionsMenuProps { + formatHJson(): void; + formatJson(): void; +} + +function VegaActionsMenu({ formatHJson, formatJson }: VegaActionsMenuProps) { + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + + const onButtonClick = useCallback(() => setIsPopoverOpen(isOpen => !isOpen), []); + const onHJsonCLick = useCallback(() => { + formatHJson(); + setIsPopoverOpen(false); + }, [isPopoverOpen, formatHJson]); + + const onJsonCLick = useCallback(() => { + formatJson(); + setIsPopoverOpen(false); + }, [isPopoverOpen, formatJson]); + + const closePopover = useCallback(() => setIsPopoverOpen(false), []); + + const button = ( + + ); + + const items = [ + + + , + + + , + ]; + + return ( + + + + ); +} + +export { VegaActionsMenu }; diff --git a/src/legacy/core_plugins/vis_type_vega/public/components/vega_help_menu.tsx b/src/legacy/core_plugins/vis_type_vega/public/components/vega_help_menu.tsx new file mode 100644 index 0000000000000..e4443c0058e9b --- /dev/null +++ b/src/legacy/core_plugins/vis_type_vega/public/components/vega_help_menu.tsx @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import React, { useCallback, useState } from 'react'; +import { EuiButtonIcon, EuiContextMenuPanel, EuiContextMenuItem, EuiPopover } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { i18n } from '@kbn/i18n'; + +function VegaHelpMenu() { + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + const onButtonClick = useCallback(() => setIsPopoverOpen(!isPopoverOpen), [isPopoverOpen]); + + const closePopover = useCallback(() => setIsPopoverOpen(false), []); + + const button = ( + + ); + + const items = [ + + + , + + + , + + + , + ]; + + return ( + + + + ); +} + +export { VegaHelpMenu }; diff --git a/src/legacy/core_plugins/vis_type_vega/public/components/vega_vis_editor.tsx b/src/legacy/core_plugins/vis_type_vega/public/components/vega_vis_editor.tsx new file mode 100644 index 0000000000000..6d14acf6ec7aa --- /dev/null +++ b/src/legacy/core_plugins/vis_type_vega/public/components/vega_vis_editor.tsx @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { useCallback } from 'react'; +import { EuiCodeEditor } from '@elastic/eui'; +import compactStringify from 'json-stringify-pretty-compact'; +// @ts-ignore +import hjson from 'hjson'; +import { i18n } from '@kbn/i18n'; + +import { toastNotifications } from 'ui/notify'; +import { VisOptionsProps } from 'ui/vis/editors/default'; +import { VisParams } from '../vega_fn'; +import { VegaHelpMenu } from './vega_help_menu'; +import { VegaActionsMenu } from './vega_actions_menu'; + +const aceOptions = { + maxLines: Infinity, + highlightActiveLine: false, + showPrintMargin: false, + tabSize: 2, + useSoftTabs: true, + wrap: true, +}; + +const hjsonStringifyOptions = { + bracesSameLine: true, + keepWsc: true, +}; + +function format(value: string, stringify: typeof compactStringify, options?: any) { + try { + const spec = hjson.parse(value, { legacyRoot: false, keepWsc: true }); + return stringify(spec, options); + } catch (err) { + // This is a common case - user tries to format an invalid HJSON text + toastNotifications.addError(err, { + title: i18n.translate('visTypeVega.editor.formatError', { + defaultMessage: 'Error formatting spec', + }), + }); + + return value; + } +} + +function VegaVisEditor({ stateParams, setValue }: VisOptionsProps) { + const onChange = useCallback( + (value: string) => { + setValue('spec', value); + }, + [setValue] + ); + + const formatJson = useCallback( + () => setValue('spec', format(stateParams.spec, compactStringify)), + [setValue, stateParams.spec] + ); + + const formatHJson = useCallback( + () => setValue('spec', format(stateParams.spec, hjson.stringify, hjsonStringifyOptions)), + [setValue, stateParams.spec] + ); + + return ( +
+ ); +} + +export { VegaVisEditor }; diff --git a/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_action_menu.js b/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_action_menu.js deleted file mode 100644 index 6f136e3679f30..0000000000000 --- a/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_action_menu.js +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import PropTypes from 'prop-types'; - -import React, { - Component, -} from 'react'; - -import { - EuiButtonIcon, - EuiContextMenuPanel, - EuiContextMenuItem, - EuiPopover, -} from '@elastic/eui'; - -import { FormattedMessage } from '@kbn/i18n/react'; - -export class VegaActionsMenu extends Component { - constructor(props) { - super(props); - - this.state = { - isPopoverOpen: false, - }; - } - - onButtonClick = () => { - this.setState(prevState => ({ - isPopoverOpen: !prevState.isPopoverOpen, - })); - }; - - closePopover = () => { - this.setState({ - isPopoverOpen: false, - }); - }; - - render() { - const button = ( - - } - /> - ); - - const items = [ - ( - { this.closePopover(); this.props.formatHJson(event); }} - > - - - ), ( - { this.closePopover(); this.props.formatJson(event); }} - > - - - ) - ]; - - return ( - - - - ); - } -} - -VegaActionsMenu.propTypes = { - formatHJson: PropTypes.func.isRequired, - formatJson: PropTypes.func.isRequired, -}; diff --git a/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_help_menu.js b/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_help_menu.js deleted file mode 100644 index fe0819a729490..0000000000000 --- a/src/legacy/core_plugins/vis_type_vega/public/help_menus/vega_help_menu.js +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import React, { - Component, -} from 'react'; - -import { - EuiButtonIcon, - EuiContextMenuPanel, - EuiContextMenuItem, - EuiPopover, -} from '@elastic/eui'; - -import { FormattedMessage } from '@kbn/i18n/react'; - -export class VegaHelpMenu extends Component { - constructor(props) { - super(props); - - this.state = { - isPopoverOpen: false, - }; - } - - onButtonClick = () => { - this.setState(prevState => ({ - isPopoverOpen: !prevState.isPopoverOpen, - })); - }; - - closePopover = () => { - this.setState({ - isPopoverOpen: false, - }); - }; - - render() { - const button = ( - - } - /> - ); - - const items = [ - ( - { this.closePopover(); }} - > - - - ), ( - { this.closePopover(); }} - > - - - ), ( - { this.closePopover(); }} - > - - - ) - ]; - - return ( - - - - ); - } -} diff --git a/src/legacy/core_plugins/vis_type_vega/public/shim/legacy_dependencies_plugin.ts b/src/legacy/core_plugins/vis_type_vega/public/shim/legacy_dependencies_plugin.ts index 2c4c2d2491fc4..af4425e3d5548 100644 --- a/src/legacy/core_plugins/vis_type_vega/public/shim/legacy_dependencies_plugin.ts +++ b/src/legacy/core_plugins/vis_type_vega/public/shim/legacy_dependencies_plugin.ts @@ -18,9 +18,9 @@ */ import chrome from 'ui/chrome'; +import 'ui/vis/map/service_settings'; import 'ui/es'; // required for $injector.get('es') below import { CoreStart, Plugin } from 'kibana/public'; -import { initVegaLegacyModule } from './vega_legacy_module'; /** @internal */ export interface LegacyDependenciesPluginSetup { @@ -31,9 +31,6 @@ export interface LegacyDependenciesPluginSetup { export class LegacyDependenciesPlugin implements Plugin, void> { public async setup() { - // Init kibana/vega AngularJS module. - initVegaLegacyModule(); - const $injector = await chrome.dangerouslyGetActiveInjector(); return { diff --git a/src/legacy/core_plugins/vis_type_vega/public/shim/vega_legacy_module.ts b/src/legacy/core_plugins/vis_type_vega/public/shim/vega_legacy_module.ts deleted file mode 100644 index 4dc6f03821649..0000000000000 --- a/src/legacy/core_plugins/vis_type_vega/public/shim/vega_legacy_module.ts +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import 'ngreact'; -import 'brace/mode/hjson'; -import 'brace/ext/searchbox'; -import 'ui/accessibility/kbn_ui_ace_keyboard_mode'; -import 'ui/vis/map/service_settings'; - -import { once } from 'lodash'; -// @ts-ignore -import { uiModules } from 'ui/modules'; -import { wrapInI18nContext } from 'ui/i18n'; - -// @ts-ignore -import { VegaEditorController } from '../vega_editor_controller'; -// @ts-ignore -import { VegaHelpMenu } from '../help_menus/vega_help_menu'; -// @ts-ignore -import { VegaActionsMenu } from '../help_menus/vega_action_menu'; - -/** @internal */ -export const initVegaLegacyModule = once((): void => { - uiModules - .get('kibana/vega', ['react']) - .controller('VegaEditorController', VegaEditorController) - .directive('vegaActionsMenu', (reactDirective: any) => - reactDirective(wrapInI18nContext(VegaActionsMenu)) - ) - .directive('vegaHelpMenu', (reactDirective: any) => - reactDirective(wrapInI18nContext(VegaHelpMenu)) - ); -}); diff --git a/src/legacy/core_plugins/vis_type_vega/public/vega_editor_controller.js b/src/legacy/core_plugins/vis_type_vega/public/vega_editor_controller.js deleted file mode 100644 index f91beff59a34f..0000000000000 --- a/src/legacy/core_plugins/vis_type_vega/public/vega_editor_controller.js +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import compactStringify from 'json-stringify-pretty-compact'; -import hjson from 'hjson'; -import { toastNotifications } from 'ui/notify'; -import { i18n } from '@kbn/i18n'; - -export class VegaEditorController { - constructor($scope) { - this.$scope = $scope; - $scope.aceLoaded = (editor) => { - editor.$blockScrolling = Infinity; - - const session = editor.getSession(); - session.setTabSize(2); - session.setUseSoftTabs(true); - - this.aceEditor = editor; - }; - - $scope.formatJson = (event) => { - this._format(event, compactStringify, { - maxLength: this._getCodeWidth(), - }); - }; - - $scope.formatHJson = (event) => { - this._format(event, hjson.stringify, { - condense: this._getCodeWidth(), - bracesSameLine: true, - keepWsc: true, - }); - }; - } - - _getCodeWidth() { - return this.aceEditor.getSession().getWrapLimit(); - } - - _format(event, stringify, opts) { - event.preventDefault(); - - let newSpec; - try { - const spec = hjson.parse(this.aceEditor.getSession().doc.getValue(), { legacyRoot: false, keepWsc: true }); - newSpec = stringify(spec, opts); - } catch (err) { - // This is a common case - user tries to format an invalid HJSON text - toastNotifications.addError(err, { - title: i18n.translate('visTypeVega.editor.formatError', { - defaultMessage: 'Error formatting spec', - }), - }); - return; - } - - // ui-ace only accepts changes from the editor when they - // happen outside of a digest cycle - // Per @spalger, we used $$postDigest() instead of setTimeout(() => {}, 0) - // because it better described the intention. - this.$scope.$$postDigest(() => { - // set the new value to the session doc so that it - // is treated as an edit by ace: ace adds it to the - // undo stack and emits it as a change like all - // other edits - this.aceEditor.getSession().doc.setValue(newSpec); - }); - } -} diff --git a/src/legacy/core_plugins/vis_type_vega/public/vega_editor_template.html b/src/legacy/core_plugins/vis_type_vega/public/vega_editor_template.html deleted file mode 100644 index 4d5d6189f3302..0000000000000 --- a/src/legacy/core_plugins/vis_type_vega/public/vega_editor_template.html +++ /dev/null @@ -1,30 +0,0 @@ -
diff --git a/src/legacy/core_plugins/vis_type_vega/public/vega_type.ts index 6ffcd8867ffea..0d5290ddbefc7 100644 --- a/src/legacy/core_plugins/vis_type_vega/public/vega_type.ts +++ b/src/legacy/core_plugins/vis_type_vega/public/vega_type.ts @@ -24,9 +24,9 @@ import { DefaultEditorSize } from 'ui/vis/editor_size'; // @ts-ignore import { defaultFeedbackMessage } from 'ui/vis/default_feedback_message'; -import vegaEditorTemplate from './vega_editor_template.html'; import { visFactory } from '../../visualizations/public'; import { VegaVisualizationDependencies } from './plugin'; +import { VegaVisEditor } from './components'; import { createVegaRequestHandler } from './vega_request_handler'; // @ts-ignore @@ -48,7 +48,7 @@ export const createVegaTypeDefinition = (dependencies: VegaVisualizationDependen icon: 'visVega', visConfig: { defaults: { spec: defaultSpec } }, editorConfig: { - optionsTemplate: vegaEditorTemplate, + optionsTemplate: VegaVisEditor, enableAutoApply: true, defaultSize: DefaultEditorSize.MEDIUM, },
diff --git a/src/legacy/server/sample_data/README.md new file mode 100644 index 0000000000000..9e93504348922 --- /dev/null +++ b/src/legacy/server/sample_data/README.md @@ -0,0 +1,20 @@
+### What happens when a user installs a sample data set?
+1) Kibana deletes existing Elasticsearch indices for the sample data set if they exist from previous installs.
+2) Kibana creates Elasticsearch indices with the provided field mappings.
+3) Kibana uses bulk insert to ingest the newline-delimited JSON into the Elasticsearch index. Kibana migrates timestamps provided in the newline-delimited JSON to the current time frame for any date field defined in `timeFields`.
+4) Kibana installs all saved objects for the sample data set. This overrides any saved objects previously installed for the sample data set.
+
+Elasticsearch index names are prefixed with `kibana_sample_data_`. For more details, see [createIndexName](/src/legacy/server/sample_data/routes/lib/create_index_name.js).
+
+Sample data sets typically provide data that spans 5 weeks into the past and 5 weeks into the future, so users see data relative to `now` for a few weeks after installing a sample data set.
+
+### Adding new sample data sets
+Use [existing sample data sets](/src/legacy/server/sample_data/data_sets) as examples.
+To avoid bloating the Kibana distribution, keep data set size to a minimum.
+
+Follow the steps below to add new sample data sets to Kibana.
+1) Create newline-delimited JSON containing the sample data.
+2) Create a file with the Elasticsearch field mappings for the sample data indices.
+3) Create Kibana saved objects for the sample data, including index-patterns, visualizations, and dashboards. The best way to extract the saved objects is the Kibana Management -> Saved Objects [export UI](https://www.elastic.co/guide/en/kibana/current/managing-saved-objects.html#_export).
+4) Define a sample data spec conforming to the [Data Set Schema](/src/legacy/server/sample_data/data_set_schema.js).
+5) Register the sample data by calling `server.registerSampleDataset(yourSpecProvider)`, where `yourSpecProvider` is a function that returns the sample data spec object from step 4 (a minimal example provider is sketched below).
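For orientation, the following is a minimal, illustrative sketch of the spec provider mentioned in step 5 of the README above. The property names here are assumptions chosen for the example, not the definitive contract; the authoritative shape is whatever the [Data Set Schema](/src/legacy/server/sample_data/data_set_schema.js) validates, and the existing data sets under `data_sets/` remain the best reference.

```js
// Illustrative sketch only: property names below are assumptions for the example;
// consult data_set_schema.js and the existing data sets for the real shape.
const path = require('path');

function mySampleDataSpecProvider() {
  return {
    id: 'my_sample_set',
    name: 'My sample data set',
    description: 'A tiny data set illustrating the registration flow.',
    defaultIndex: 'my-sample-index-pattern-id', // saved object id of the index pattern
    savedObjects: [], // index-patterns, visualizations, and dashboards exported in step 3
    dataIndices: [
      {
        id: 'my_sample_set', // index is created as kibana_sample_data_my_sample_set
        dataPath: path.join(__dirname, 'my_sample_set.json.gz'), // newline-delimited JSON from step 1
        fields: {}, // Elasticsearch field mappings from step 2
        timeFields: ['timestamp'], // date fields migrated relative to `now` on install
      },
    ],
  };
}

module.exports = { mySampleDataSpecProvider };

// Step 5: register the provider, typically from a plugin's init(server) hook:
//   server.registerSampleDataset(mySampleDataSpecProvider);
```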
diff --git a/src/legacy/server/saved_objects/routes/bulk_get.test.ts b/src/legacy/server/saved_objects/routes/bulk_get.test.ts index 2d564e3044d93..546164be65c9f 100644 --- a/src/legacy/server/saved_objects/routes/bulk_get.test.ts +++ b/src/legacy/server/saved_objects/routes/bulk_get.test.ts @@ -20,22 +20,18 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createBulkGetRoute } from './bulk_get'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('POST /api/saved_objects/_bulk_get', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { - savedObjectsClient.bulkGet.mockImplementation(() => Promise.resolve('')); + savedObjectsClient.bulkGet.mockImplementation(() => + Promise.resolve({ + saved_objects: [], + }) + ); server = createMockServer(); const prereqs = { getSavedObjectsClient: { @@ -73,6 +69,7 @@ describe('POST /api/saved_objects/_bulk_get', () => { title: 'logstash-*', version: 'foo', references: [], + attributes: {}, }, ], }; diff --git a/src/legacy/server/saved_objects/routes/bulk_update.test.ts b/src/legacy/server/saved_objects/routes/bulk_update.test.ts new file mode 100644 index 0000000000000..ee74ddfc535d2 --- /dev/null +++ b/src/legacy/server/saved_objects/routes/bulk_update.test.ts @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import Hapi from 'hapi'; +import { createMockServer } from './_mock_server'; +import { createBulkUpdateRoute } from './bulk_update'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; + +describe('PUT /api/saved_objects/_bulk_update', () => { + let server: Hapi.Server; + const savedObjectsClient = SavedObjectsClientMock.create(); + + beforeEach(() => { + server = createMockServer(); + + const prereqs = { + getSavedObjectsClient: { + assign: 'savedObjectsClient', + method() { + return savedObjectsClient; + }, + }, + }; + + server.route(createBulkUpdateRoute(prereqs)); + }); + + afterEach(() => { + savedObjectsClient.bulkUpdate.mockReset(); + }); + + it('formats successful response', async () => { + const request = { + method: 'PUT', + url: '/api/saved_objects/_bulk_update', + payload: [ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing visualization', + }, + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard', + }, + }, + ], + }; + + const time = Date.now().toLocaleString(); + const clientResponse = [ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + updated_at: time, + version: 'version', + references: undefined, + attributes: { + title: 'An existing visualization', + }, + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + updated_at: time, + version: 'version', + references: undefined, + attributes: { + title: 'An existing dashboard', + }, + }, + ]; + + savedObjectsClient.bulkUpdate.mockImplementation(() => + Promise.resolve({ saved_objects: clientResponse }) + ); + + const { payload, statusCode } = await server.inject(request); + const response = JSON.parse(payload); + + expect(statusCode).toBe(200); + expect(response).toEqual({ saved_objects: clientResponse }); + }); + + it('calls upon savedObjectClient.bulkUpdate', async () => { + const request = { + method: 'PUT', + url: '/api/saved_objects/_bulk_update', + payload: [ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing visualization', + }, + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard', + }, + }, + ], + }; + + savedObjectsClient.bulkUpdate.mockImplementation(() => Promise.resolve({ saved_objects: [] })); + + await server.inject(request); + + expect(savedObjectsClient.bulkUpdate).toHaveBeenCalledWith([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing visualization', + }, + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard', + }, + }, + ]); + }); +}); diff --git a/src/legacy/server/saved_objects/routes/bulk_update.ts b/src/legacy/server/saved_objects/routes/bulk_update.ts new file mode 100644 index 0000000000000..a77b0c059447f --- /dev/null +++ b/src/legacy/server/saved_objects/routes/bulk_update.ts @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import Hapi from 'hapi'; +import Joi from 'joi'; +import { SavedObjectsClient, SavedObjectsBulkUpdateObject } from 'src/core/server'; +import { Prerequisites } from './types'; + +interface BulkUpdateRequest extends Hapi.Request { + pre: { + savedObjectsClient: SavedObjectsClient; + }; + payload: SavedObjectsBulkUpdateObject[]; +} + +export const createBulkUpdateRoute = (prereqs: Prerequisites) => { + return { + path: '/api/saved_objects/_bulk_update', + method: 'PUT', + config: { + pre: [prereqs.getSavedObjectsClient], + validate: { + payload: Joi.array().items( + Joi.object({ + type: Joi.string().required(), + id: Joi.string().required(), + attributes: Joi.object().required(), + version: Joi.string(), + references: Joi.array().items( + Joi.object().keys({ + name: Joi.string().required(), + type: Joi.string().required(), + id: Joi.string().required(), + }) + ), + }) + ), + }, + handler(request: BulkUpdateRequest) { + const { savedObjectsClient } = request.pre; + return savedObjectsClient.bulkUpdate(request.payload); + }, + }, + }; +}; diff --git a/src/legacy/server/saved_objects/routes/create.test.ts b/src/legacy/server/saved_objects/routes/create.test.ts index d41b621bad97a..85096228c3175 100644 --- a/src/legacy/server/saved_objects/routes/create.test.ts +++ b/src/legacy/server/saved_objects/routes/create.test.ts @@ -20,22 +20,22 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createCreateRoute } from './create'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('POST /api/saved_objects/{type}', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), + const clientResponse = { + id: 'logstash-*', + type: 'index-pattern', + title: 'logstash-*', + version: 'foo', + references: [], + attributes: {}, }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { - savedObjectsClient.create.mockImplementation(() => Promise.resolve('')); + savedObjectsClient.create.mockImplementation(() => Promise.resolve(clientResponse)); server = createMockServer(); const prereqs = { @@ -65,15 +65,6 @@ describe('POST /api/saved_objects/{type}', () => { }, }; - const clientResponse = { - type: 'index-pattern', - id: 'logstash-*', - title: 'Testing', - references: [], - }; - - savedObjectsClient.create.mockImplementation(() => Promise.resolve(clientResponse)); - const { payload, statusCode } = await server.inject(request); const response = JSON.parse(payload); diff --git a/src/legacy/server/saved_objects/routes/delete.test.ts b/src/legacy/server/saved_objects/routes/delete.test.ts index 5bd3a4a75938d..9e2adcf9d3b91 100644 --- a/src/legacy/server/saved_objects/routes/delete.test.ts +++ b/src/legacy/server/saved_objects/routes/delete.test.ts @@ -20,19 +20,11 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createDeleteRoute } from './delete'; +import { SavedObjectsClientMock } from 
'../../../../core/server/mocks'; describe('DELETE /api/saved_objects/{type}/{id}', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { savedObjectsClient.delete.mockImplementation(() => Promise.resolve('{}')); diff --git a/src/legacy/server/saved_objects/routes/export.test.ts b/src/legacy/server/saved_objects/routes/export.test.ts index 1b7e0dfa65db5..2670535ab995e 100644 --- a/src/legacy/server/saved_objects/routes/export.test.ts +++ b/src/legacy/server/saved_objects/routes/export.test.ts @@ -28,20 +28,15 @@ import * as exportMock from '../../../../core/server/saved_objects/export'; import { createMockServer } from './_mock_server'; import { createExportRoute } from './export'; import { createListStream } from '../../../utils/streams'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; const getSortedObjectsForExport = exportMock.getSortedObjectsForExport as jest.Mock; describe('POST /api/saved_objects/_export', () => { let server: Hapi.Server; const savedObjectsClient = { + ...SavedObjectsClientMock.create(), errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), }; beforeEach(() => { @@ -164,6 +159,7 @@ describe('POST /api/saved_objects/_export', () => { "savedObjectsClient": Object { "bulkCreate": [MockFunction], "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], "create": [MockFunction], "delete": [MockFunction], "errors": Object {}, diff --git a/src/legacy/server/saved_objects/routes/find.test.ts b/src/legacy/server/saved_objects/routes/find.test.ts index d58c32a5503b9..89cd0dd28d035 100644 --- a/src/legacy/server/saved_objects/routes/find.test.ts +++ b/src/legacy/server/saved_objects/routes/find.test.ts @@ -20,22 +20,20 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createFindRoute } from './find'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('GET /api/saved_objects/_find', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); + const clientResponse = { + total: 0, + saved_objects: [], + per_page: 0, + page: 0, + }; beforeEach(() => { - savedObjectsClient.find.mockImplementation(() => Promise.resolve('')); + savedObjectsClient.find.mockImplementation(() => Promise.resolve(clientResponse)); server = createMockServer(); const prereqs = { @@ -76,15 +74,18 @@ describe('GET /api/saved_objects/_find', () => { url: '/api/saved_objects/_find?type=index-pattern', }; - const clientResponse = { + const findResponse = { total: 2, - data: [ + per_page: 2, + page: 1, + saved_objects: [ { type: 'index-pattern', id: 'logstash-*', title: 'logstash-*', timeFieldName: '@timestamp', notExpandable: true, + attributes: {}, references: [], }, { @@ -93,18 +94,19 @@ describe('GET /api/saved_objects/_find', () => { title: 'stocks-*', timeFieldName: '@timestamp', notExpandable: true, + attributes: {}, references: [], }, ], }; - savedObjectsClient.find.mockImplementation(() => 
Promise.resolve(clientResponse)); + savedObjectsClient.find.mockImplementation(() => Promise.resolve(findResponse)); const { payload, statusCode } = await server.inject(request); const response = JSON.parse(payload); expect(statusCode).toBe(200); - expect(response).toEqual(clientResponse); + expect(response).toEqual(findResponse); }); it('calls upon savedObjectClient.find with defaults', async () => { diff --git a/src/legacy/server/saved_objects/routes/get.test.ts b/src/legacy/server/saved_objects/routes/get.test.ts index b67288730dacb..2f7eaea1bc770 100644 --- a/src/legacy/server/saved_objects/routes/get.test.ts +++ b/src/legacy/server/saved_objects/routes/get.test.ts @@ -20,22 +20,24 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createGetRoute } from './get'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('GET /api/saved_objects/{type}/{id}', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { - savedObjectsClient.get.mockImplementation(() => Promise.resolve('')); + savedObjectsClient.get.mockImplementation(() => + Promise.resolve({ + id: 'logstash-*', + title: 'logstash-*', + type: 'logstash-type', + attributes: {}, + timeFieldName: '@timestamp', + notExpandable: true, + references: [], + }) + ); server = createMockServer(); const prereqs = { @@ -62,6 +64,8 @@ describe('GET /api/saved_objects/{type}/{id}', () => { const clientResponse = { id: 'logstash-*', title: 'logstash-*', + type: 'logstash-type', + attributes: {}, timeFieldName: '@timestamp', notExpandable: true, references: [], diff --git a/src/legacy/server/saved_objects/routes/import.test.ts b/src/legacy/server/saved_objects/routes/import.test.ts index 5e859e7a57d8c..1a0684a35ec79 100644 --- a/src/legacy/server/saved_objects/routes/import.test.ts +++ b/src/legacy/server/saved_objects/routes/import.test.ts @@ -20,18 +20,16 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createImportRoute } from './import'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('POST /api/saved_objects/_import', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), + const savedObjectsClient = SavedObjectsClientMock.create(); + const emptyResponse = { + saved_objects: [], + total: 0, + per_page: 0, + page: 0, }; beforeEach(() => { @@ -68,7 +66,7 @@ describe('POST /api/saved_objects/_import', () => { 'content-Type': 'multipart/form-data; boundary=BOUNDARY', }, }; - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); const { payload, statusCode } = await server.inject(request); const response = JSON.parse(payload); expect(statusCode).toBe(200); @@ -95,7 +93,7 @@ describe('POST /api/saved_objects/_import', () => { 'content-Type': 'multipart/form-data; boundary=EXAMPLE', }, }; - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValueOnce({ saved_objects: [ { @@ -104,6 
+102,7 @@ describe('POST /api/saved_objects/_import', () => { attributes: { title: 'my-pattern-*', }, + references: [], }, ], }); @@ -141,7 +140,7 @@ describe('POST /api/saved_objects/_import', () => { 'content-Type': 'multipart/form-data; boundary=EXAMPLE', }, }; - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValueOnce({ saved_objects: [ { @@ -150,6 +149,7 @@ describe('POST /api/saved_objects/_import', () => { attributes: { title: 'my-pattern-*', }, + references: [], }, { type: 'dashboard', @@ -157,6 +157,7 @@ describe('POST /api/saved_objects/_import', () => { attributes: { title: 'Look at my dashboard', }, + references: [], }, ], }); @@ -187,7 +188,7 @@ describe('POST /api/saved_objects/_import', () => { 'content-Type': 'multipart/form-data; boundary=EXAMPLE', }, }; - savedObjectsClient.find.mockResolvedValueOnce({ saved_objects: [] }); + savedObjectsClient.find.mockResolvedValueOnce(emptyResponse); savedObjectsClient.bulkCreate.mockResolvedValueOnce({ saved_objects: [ { @@ -256,6 +257,8 @@ describe('POST /api/saved_objects/_import', () => { statusCode: 404, message: 'Not found', }, + references: [], + attributes: {}, }, ], }); diff --git a/src/legacy/server/saved_objects/routes/index.ts b/src/legacy/server/saved_objects/routes/index.ts index 4c3d8c4163155..0afcfba308546 100644 --- a/src/legacy/server/saved_objects/routes/index.ts +++ b/src/legacy/server/saved_objects/routes/index.ts @@ -27,4 +27,5 @@ export { createImportRoute } from './import'; export { createLogLegacyImportRoute } from './log_legacy_import'; export { createResolveImportErrorsRoute } from './resolve_import_errors'; export { createUpdateRoute } from './update'; +export { createBulkUpdateRoute } from './bulk_update'; export { createExportRoute } from './export'; diff --git a/src/legacy/server/saved_objects/routes/resolve_import_errors.test.ts b/src/legacy/server/saved_objects/routes/resolve_import_errors.test.ts index 32adf18f7481c..7988165207e63 100644 --- a/src/legacy/server/saved_objects/routes/resolve_import_errors.test.ts +++ b/src/legacy/server/saved_objects/routes/resolve_import_errors.test.ts @@ -20,19 +20,11 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createResolveImportErrorsRoute } from './resolve_import_errors'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('POST /api/saved_objects/_resolve_import_errors', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { server = createMockServer(); @@ -111,6 +103,7 @@ describe('POST /api/saved_objects/_resolve_import_errors', () => { attributes: { title: 'Look at my dashboard', }, + references: [], }, ], }); @@ -153,6 +146,7 @@ describe('POST /api/saved_objects/_resolve_import_errors', () => { attributes: { title: 'Look at my dashboard', }, + references: [], }, ], }); @@ -219,6 +213,7 @@ describe('POST /api/saved_objects/_resolve_import_errors', () => { attributes: { title: 'Look at my dashboard', }, + references: [], }, ], }); diff --git a/src/legacy/server/saved_objects/routes/update.test.ts b/src/legacy/server/saved_objects/routes/update.test.ts index d1a86b1e38e74..69a6fe3030009 100644 --- 
a/src/legacy/server/saved_objects/routes/update.test.ts +++ b/src/legacy/server/saved_objects/routes/update.test.ts @@ -20,22 +20,23 @@ import Hapi from 'hapi'; import { createMockServer } from './_mock_server'; import { createUpdateRoute } from './update'; +import { SavedObjectsClientMock } from '../../../../core/server/mocks'; describe('PUT /api/saved_objects/{type}/{id?}', () => { let server: Hapi.Server; - const savedObjectsClient = { - errors: {} as any, - bulkCreate: jest.fn(), - bulkGet: jest.fn(), - create: jest.fn(), - delete: jest.fn(), - find: jest.fn(), - get: jest.fn(), - update: jest.fn(), - }; + const savedObjectsClient = SavedObjectsClientMock.create(); beforeEach(() => { - savedObjectsClient.update.mockImplementation(() => Promise.resolve('')); + const clientResponse = { + id: 'logstash-*', + title: 'logstash-*', + type: 'logstash-type', + attributes: {}, + timeFieldName: '@timestamp', + notExpandable: true, + references: [], + }; + savedObjectsClient.update.mockImplementation(() => Promise.resolve(clientResponse)); server = createMockServer(); const prereqs = { @@ -69,8 +70,10 @@ describe('PUT /api/saved_objects/{type}/{id?}', () => { const clientResponse = { id: 'logstash-*', title: 'logstash-*', + type: 'logstash-type', timeFieldName: '@timestamp', notExpandable: true, + attributes: {}, references: [], }; diff --git a/src/legacy/server/saved_objects/saved_objects_mixin.js b/src/legacy/server/saved_objects/saved_objects_mixin.js index 0583dea333c7d..7324a02095c67 100644 --- a/src/legacy/server/saved_objects/saved_objects_mixin.js +++ b/src/legacy/server/saved_objects/saved_objects_mixin.js @@ -39,6 +39,7 @@ import { createFindRoute, createGetRoute, createUpdateRoute, + createBulkUpdateRoute, createExportRoute, createImportRoute, createResolveImportErrorsRoute, @@ -87,6 +88,7 @@ export async function savedObjectsMixin(kbnServer, server) { }; server.route(createBulkCreateRoute(prereqs)); server.route(createBulkGetRoute(prereqs)); + server.route(createBulkUpdateRoute(prereqs)); server.route(createCreateRoute(prereqs)); server.route(createDeleteRoute(prereqs)); server.route(createFindRoute(prereqs)); diff --git a/src/legacy/server/saved_objects/saved_objects_mixin.test.js b/src/legacy/server/saved_objects/saved_objects_mixin.test.js index 45ac31ab7fc61..0e96189db4650 100644 --- a/src/legacy/server/saved_objects/saved_objects_mixin.test.js +++ b/src/legacy/server/saved_objects/saved_objects_mixin.test.js @@ -157,9 +157,9 @@ describe('Saved Objects Mixin', () => { }); describe('Routes', () => { - it('should create 11 routes', () => { + it('should create 12 routes', () => { savedObjectsMixin(mockKbnServer, mockServer); - expect(mockServer.route).toHaveBeenCalledTimes(11); + expect(mockServer.route).toHaveBeenCalledTimes(12); }); it('should add POST /api/saved_objects/_bulk_create', () => { savedObjectsMixin(mockKbnServer, mockServer); diff --git a/src/legacy/ui/public/_index.scss b/src/legacy/ui/public/_index.scss index 2ce9a0a8aa06f..98675402b43cc 100644 --- a/src/legacy/ui/public/_index.scss +++ b/src/legacy/ui/public/_index.scss @@ -13,7 +13,6 @@ @import './courier/index'; @import './collapsible_sidebar/index'; @import './directives/index'; -@import './error_allow_explicit_index/index'; @import './error_auto_create_index/index'; @import './error_url_overflow/index'; @import './exit_full_screen/index'; diff --git a/src/legacy/ui/public/agg_types/__tests__/agg_params.js b/src/legacy/ui/public/agg_types/__tests__/agg_params.js deleted file mode 100644 index 
d4fe2a663543a..0000000000000 --- a/src/legacy/ui/public/agg_types/__tests__/agg_params.js +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import expect from '@kbn/expect'; -import { initParams } from '../agg_params'; -import { BaseParamType } from '../param_types/base'; -import { FieldParamType } from '../param_types/field'; -import { OptionedParamType } from '../param_types/optioned'; - -describe('AggParams class', function () { - - describe('constructor args', function () { - it('accepts an array of param defs', function () { - const params = [ - { name: 'one' }, - { name: 'two' } - ]; - const aggParams = initParams(params); - - expect(aggParams).to.have.length(params.length); - expect(aggParams).to.be.an(Array); - }); - }); - - describe('AggParam creation', function () { - it('Uses the FieldParamType class for params with the name "field"', function () { - const params = [ - { name: 'field', type: 'field' } - ]; - const aggParams = initParams(params); - - expect(aggParams).to.have.length(params.length); - expect(aggParams[0]).to.be.a(FieldParamType); - }); - - it('Uses the OptionedParamType class for params of type "optioned"', function () { - const params = [ - { - name: 'interval', - type: 'optioned' - } - ]; - const aggParams = initParams(params); - - expect(aggParams).to.have.length(params.length); - expect(aggParams[0]).to.be.a(OptionedParamType); - }); - - it('Uses the OptionedParamType class for params of type "optioned"', function () { - const params = [ - { - name: 'order', - type: 'optioned' - } - ]; - const aggParams = initParams(params); - - expect(aggParams).to.have.length(params.length); - expect(aggParams[0]).to.be.a(OptionedParamType); - }); - - it('Always converts the params to a BaseParamType', function () { - const params = [ - { - name: 'height', - editor: 'high' - }, - { - name: 'weight', - editor: 'big' - }, - { - name: 'waist', - editor: 'small' - } - ]; - const aggParams = initParams(params); - - expect(aggParams).to.have.length(params.length); - aggParams.forEach(function (aggParam) { - expect(aggParam).to.be.a(BaseParamType); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/agg_types/__tests__/agg_type.js b/src/legacy/ui/public/agg_types/__tests__/agg_type.js deleted file mode 100644 index 81daa9b54fa43..0000000000000 --- a/src/legacy/ui/public/agg_types/__tests__/agg_type.js +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import '../../private'; -import { VisProvider } from '../../vis'; -import { fieldFormats } from '../../registry/field_formats'; -import { AggType } from '../agg_type'; -import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; - -describe('AggType Class', function () { - let indexPattern; - let Vis; - - - beforeEach(ngMock.module('kibana')); - beforeEach(ngMock.inject(function (Private) { - - Vis = Private(VisProvider); - indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); - })); - - describe('constructor', function () { - - it('requires a config object as it\'s first param', function () { - expect(function () { - new AggType(null); - }).to.throwError(); - }); - - describe('application of config properties', function () { - const copiedConfigProps = [ - 'name', - 'title', - 'makeLabel', - 'ordered' - ]; - - describe('"' + copiedConfigProps.join('", "') + '"', function () { - it('assigns the config value to itself', function () { - const config = _.transform(copiedConfigProps, function (config, prop) { - config[prop] = {}; - }, {}); - - const aggType = new AggType(config); - - copiedConfigProps.forEach(function (prop) { - expect(aggType[prop]).to.be(config[prop]); - }); - }); - }); - - describe('makeLabel', function () { - it('makes a function when the makeLabel config is not specified', function () { - const someGetter = function () {}; - - let aggType = new AggType({ - makeLabel: someGetter - }); - - expect(aggType.makeLabel).to.be(someGetter); - - aggType = new AggType({ - name: 'pizza' - }); - - expect(aggType.makeLabel).to.be.a('function'); - expect(aggType.makeLabel()).to.be('pizza'); - }); - }); - - describe('getFormat', function () { - it('returns the formatter for the aggConfig', function () { - const aggType = new AggType({}); - - let vis = new Vis(indexPattern, { - aggs: [ - { - type: 'date_histogram', - schema: 'segment' - } - ] - }); - let aggConfig = vis.aggs.byName('date_histogram')[0]; - - expect(aggType.getFormat(aggConfig)).to.be(fieldFormats.getDefaultInstance('date')); - - vis = new Vis(indexPattern, { - aggs: [ - { - type: 'count', - schema: 'metric' - } - ] - }); - aggConfig = vis.aggs.byName('count')[0]; - - expect(aggType.getFormat(aggConfig)).to.be(fieldFormats.getDefaultInstance('string')); - }); - - it('can be overridden via config', function () { - const someGetter = function () {}; - - const aggType = new AggType({ - getFormat: someGetter - }); - - expect(aggType.getFormat).to.be(someGetter); - }); - }); - - describe('params', function () { - - it('defaults to AggParams object with JSON param', function () { - const aggType = new AggType({ - name: 'smart agg' - }); - - expect(aggType.params).to.be.an(Array); - expect(aggType.params.length).to.be(2); - expect(aggType.params[0].name).to.be('json'); - 
expect(aggType.params[1].name).to.be('customLabel'); - }); - - it('can disable customLabel', function () { - const aggType = new AggType({ - name: 'smart agg', - customLabels: false - }); - - expect(aggType.params.length).to.be(1); - expect(aggType.params[0].name).to.be('json'); - }); - - it('passes the params arg directly to the AggParams constructor', function () { - const params = [ - { name: 'one' }, - { name: 'two' } - ]; - const paramLength = params.length + 2; // json and custom label are always appended - - const aggType = new AggType({ - name: 'bucketeer', - params: params - }); - - expect(aggType.params).to.be.an(Array); - expect(aggType.params.length).to.be(paramLength); - }); - }); - - describe('getResponseAggs', function () { - it('copies the value', function () { - const football = {}; - const aggType = new AggType({ - getResponseAggs: football - }); - - expect(aggType.getResponseAggs).to.be(football); - }); - - it('defaults to noop', function () { - const aggType = new AggType({}); - const responseAggs = aggType.getRequestAggs(); - expect(responseAggs).to.be(undefined); - }); - }); - }); - - }); -}); diff --git a/src/legacy/ui/public/agg_types/__tests__/index.js b/src/legacy/ui/public/agg_types/__tests__/index.js deleted file mode 100644 index c977eb6eeb610..0000000000000 --- a/src/legacy/ui/public/agg_types/__tests__/index.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import expect from '@kbn/expect'; -import './agg_type'; -import './agg_params'; -import './buckets/_histogram'; -import './buckets/_geo_hash'; -import './buckets/_range'; -import './buckets/_terms_other_bucket_helper'; -import './buckets/date_histogram/_editor'; -import './buckets/date_histogram/_params'; -import { aggTypes } from '..'; -import { BucketAggType } from '../buckets/_bucket_agg_type'; -import { MetricAggType } from '../metrics/metric_agg_type'; - -const bucketAggs = aggTypes.buckets; -const metricAggs = aggTypes.metrics; - -describe('AggTypesComponent', function () { - - describe('bucket aggs', function () { - it('all extend BucketAggType', function () { - bucketAggs.forEach(function (bucketAgg) { - expect(bucketAgg).to.be.a(BucketAggType); - }); - }); - }); - - describe('metric aggs', function () { - it('all extend MetricAggType', function () { - metricAggs.forEach(function (metricAgg) { - expect(metricAgg).to.be.a(MetricAggType); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js index 3c8fde7eb7135..e4ca6075c624b 100644 --- a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js +++ b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js @@ -203,7 +203,6 @@ describe('parent pipeline aggs', function () { }); const searchSource = {}; - const request = {}; const customMetricSpy = sinon.spy(); const customMetric = aggConfig.params.customMetric; @@ -211,9 +210,9 @@ describe('parent pipeline aggs', function () { customMetric.type.params[0].modifyAggConfigOnSearchRequestStart = customMetricSpy; aggConfig.type.params.forEach(param => { - param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request); + param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource); }); - expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true); + expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true); }); }); }); diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js index fef69155d2351..aba5db9cedadf 100644 --- a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js +++ b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js @@ -145,7 +145,6 @@ describe('sibling pipeline aggs', function () { init(); const searchSource = {}; - const request = {}; const customMetricSpy = sinon.spy(); const customBucketSpy = sinon.spy(); const { customMetric, customBucket } = aggConfig.params; @@ -155,10 +154,10 @@ describe('sibling pipeline aggs', function () { customBucket.type.params[0].modifyAggConfigOnSearchRequestStart = customBucketSpy; aggConfig.type.params.forEach(param => { - param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request); + param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource); }); - expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true); - expect(customBucketSpy.calledWith(customBucket, searchSource, request)).to.be(true); + expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true); + expect(customBucketSpy.calledWith(customBucket, searchSource)).to.be(true); }); }); diff --git a/src/legacy/ui/public/agg_types/__tests__/param_types/_json.js b/src/legacy/ui/public/agg_types/__tests__/param_types/_json.js deleted file mode 100644 index 1876593f52956..0000000000000 --- 
a/src/legacy/ui/public/agg_types/__tests__/param_types/_json.js +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import expect from '@kbn/expect'; -import { BaseParamType } from '../../param_types/base'; -import { JsonParamType } from '../../param_types/json'; - -// eslint-disable-next-line import/no-default-export -export default describe('JSON', function () { - const paramName = 'json_test'; - let aggParam; - let aggConfig; - let output; - - function initParamType(config) { - config = config || {}; - const defaults = { - name: paramName, - type: 'json' - }; - - aggParam = new JsonParamType(_.defaults(config, defaults)); - } - - // fetch out deps - beforeEach(function () { - aggConfig = { params: {} }; - output = { params: {} }; - - - initParamType(); - }); - - describe('constructor', function () { - it('it is an instance of BaseParamType', function () { - expect(aggParam).to.be.a(BaseParamType); - }); - }); - - describe('write', function () { - it('should do nothing when param is not defined', function () { - expect(aggConfig.params).not.to.have.property(paramName); - - aggParam.write(aggConfig, output); - expect(output).not.to.have.property(paramName); - }); - - it('should not append param when invalid JSON', function () { - aggConfig.params[paramName] = 'i am not json'; - - aggParam.write(aggConfig, output); - expect(aggConfig.params).to.have.property(paramName); - expect(output).not.to.have.property(paramName); - }); - - it('should append param when valid JSON', function () { - const jsonData = JSON.stringify({ - new_param: 'should exist in output' - }); - - output.params.existing = 'true'; - aggConfig.params[paramName] = jsonData; - - aggParam.write(aggConfig, output); - expect(aggConfig.params).to.have.property(paramName); - expect(output.params).to.eql({ - existing: 'true', - new_param: 'should exist in output' - }); - }); - - it('should not overwrite existing params', function () { - const jsonData = JSON.stringify({ - new_param: 'should exist in output', - existing: 'should be used' - }); - - output.params.existing = 'true'; - aggConfig.params[paramName] = jsonData; - - aggParam.write(aggConfig, output); - expect(output.params).to.eql(JSON.parse(jsonData)); - }); - - it('should drop nulled params', function () { - const jsonData = JSON.stringify({ - new_param: 'should exist in output', - field: null - }); - - output.params.field = 'extensions'; - aggConfig.params[paramName] = jsonData; - - aggParam.write(aggConfig, output); - expect(Object.keys(output.params)).to.contain('new_param'); - expect(Object.keys(output.params)).to.not.contain('field'); - }); - }); -}); diff --git a/src/legacy/ui/public/agg_types/__tests__/param_types/_string.js 
b/src/legacy/ui/public/agg_types/__tests__/param_types/_string.js deleted file mode 100644 index 10c965a53bab5..0000000000000 --- a/src/legacy/ui/public/agg_types/__tests__/param_types/_string.js +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; -import expect from '@kbn/expect'; -import { BaseParamType } from '../../param_types/base'; -import { StringParamType } from '../../param_types/string'; - -// eslint-disable-next-line import/no-default-export -export default describe('String', function () { - const paramName = 'json_test'; - let aggParam; - let aggConfig; - let output; - - function initAggParam(config) { - config = config || {}; - const defaults = { - name: paramName, - type: 'string' - }; - - aggParam = new StringParamType(_.defaults(config, defaults)); - } - - - // fetch our deps - beforeEach(function () { - - aggConfig = { params: {} }; - output = { params: {} }; - }); - - describe('constructor', function () { - it('it is an instance of BaseParamType', function () { - initAggParam(); - expect(aggParam).to.be.a(BaseParamType); - }); - }); - - describe('write', function () { - it('should append param by name', function () { - const paramName = 'testing'; - const params = {}; - params[paramName] = 'some input'; - - initAggParam({ name: paramName }); - - aggConfig.params = params; - aggParam.write(aggConfig, output); - - expect(output.params).to.eql(params); - }); - - it('should not be in output with empty input', function () { - const paramName = 'more_testing'; - const params = {}; - params[paramName] = ''; - - initAggParam({ name: paramName }); - - aggConfig.params = params; - aggParam.write(aggConfig, output); - - expect(output.params).to.eql({}); - }); - }); -}); diff --git a/src/legacy/ui/public/agg_types/agg_config.ts b/src/legacy/ui/public/agg_types/agg_config.ts index 9898682b5d558..a5b1aa7cf9c0b 100644 --- a/src/legacy/ui/public/agg_types/agg_config.ts +++ b/src/legacy/ui/public/agg_types/agg_config.ts @@ -238,14 +238,14 @@ export class AggConfig { * @param {Courier.SearchRequest} searchRequest * @return {Promise} */ - onSearchRequestStart(searchSource: any, searchRequest: any) { + onSearchRequestStart(searchSource: any, options: any) { if (!this.type) { return Promise.resolve(); } return Promise.all( this.type.params.map((param: any) => - param.modifyAggConfigOnSearchRequestStart(this, searchSource, searchRequest) + param.modifyAggConfigOnSearchRequestStart(this, searchSource, options) ) ); } diff --git a/src/legacy/ui/public/agg_types/agg_configs.ts b/src/legacy/ui/public/agg_types/agg_configs.ts index e90d91eb7fd7f..675d37d05c33c 100644 --- a/src/legacy/ui/public/agg_types/agg_configs.ts +++ b/src/legacy/ui/public/agg_types/agg_configs.ts @@ -307,12 +307,10 @@ 
export class AggConfigs { return _.find(reqAgg.getResponseAggs(), { id }); } - onSearchRequestStart(searchSource: any, searchRequest: any) { + onSearchRequestStart(searchSource: any, options: any) { return Promise.all( // @ts-ignore - this.getRequestAggs().map((agg: AggConfig) => - agg.onSearchRequestStart(searchSource, searchRequest) - ) + this.getRequestAggs().map((agg: AggConfig) => agg.onSearchRequestStart(searchSource, options)) ); } } diff --git a/src/legacy/ui/public/agg_types/agg_params.test.ts b/src/legacy/ui/public/agg_types/agg_params.test.ts new file mode 100644 index 0000000000000..28d852c7f2567 --- /dev/null +++ b/src/legacy/ui/public/agg_types/agg_params.test.ts @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { AggParam, initParams } from './agg_params'; +import { BaseParamType } from './param_types/base'; +import { FieldParamType } from './param_types/field'; +import { OptionedParamType } from './param_types/optioned'; + +jest.mock('ui/new_platform'); + +describe('AggParams class', () => { + describe('constructor args', () => { + it('accepts an array of param defs', () => { + const params = [{ name: 'one' }, { name: 'two' }] as AggParam[]; + const aggParams = initParams(params); + + expect(aggParams).toHaveLength(params.length); + expect(Array.isArray(aggParams)).toBeTruthy(); + }); + }); + + describe('AggParam creation', () => { + it('Uses the FieldParamType class for params with the name "field"', () => { + const params = [{ name: 'field', type: 'field' }] as AggParam[]; + const aggParams = initParams(params); + + expect(aggParams).toHaveLength(params.length); + expect(aggParams[0] instanceof FieldParamType).toBeTruthy(); + }); + + it('Uses the OptionedParamType class for params of type "optioned"', () => { + const params = [ + { + name: 'order', + type: 'optioned', + }, + ]; + const aggParams = initParams(params); + + expect(aggParams).toHaveLength(params.length); + expect(aggParams[0] instanceof OptionedParamType).toBeTruthy(); + }); + + it('Always converts the params to a BaseParamType', function() { + const params = [ + { + name: 'height', + displayName: 'height', + }, + { + name: 'weight', + displayName: 'weight', + }, + { + name: 'waist', + displayName: 'waist', + }, + ] as AggParam[]; + + const aggParams = initParams(params); + + expect(aggParams).toHaveLength(params.length); + + aggParams.forEach(aggParam => expect(aggParam instanceof BaseParamType).toBeTruthy()); + }); + }); +}); diff --git a/src/legacy/ui/public/agg_types/agg_type.test.ts b/src/legacy/ui/public/agg_types/agg_type.test.ts new file mode 100644 index 0000000000000..1c1453b74fe98 --- /dev/null +++ b/src/legacy/ui/public/agg_types/agg_type.test.ts @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { AggType, AggTypeConfig } from './agg_type'; +import { AggConfig } from './agg_config'; + +jest.mock('ui/new_platform'); + +jest.mock('ui/registry/field_formats', () => ({ + fieldFormats: { + getDefaultInstance: jest.fn(() => 'default'), + }, +})); + +describe('AggType Class', () => { + describe('constructor', () => { + it("requires a valid config object as it's first param", () => { + expect(() => { + const aggConfig: AggTypeConfig = (undefined as unknown) as AggTypeConfig; + new AggType(aggConfig); + }).toThrowError(); + }); + + describe('application of config properties', () => { + it('assigns the config value to itself', () => { + const config: AggTypeConfig = { + name: 'name', + title: 'title', + }; + + const aggType = new AggType(config); + + expect(aggType.name).toBe('name'); + expect(aggType.title).toBe('title'); + }); + + describe('makeLabel', () => { + it('makes a function when the makeLabel config is not specified', () => { + const makeLabel = () => 'label'; + const aggConfig = {} as AggConfig; + const config: AggTypeConfig = { + name: 'name', + title: 'title', + makeLabel, + }; + + const aggType = new AggType(config); + + expect(aggType.makeLabel).toBe(makeLabel); + expect(aggType.makeLabel(aggConfig)).toBe('label'); + }); + }); + + describe('getResponseAggs/getRequestAggs', () => { + it('copies the value', () => { + const testConfig = (aggConfig: AggConfig) => [aggConfig]; + + const aggType = new AggType({ + name: 'name', + title: 'title', + getResponseAggs: testConfig, + getRequestAggs: testConfig, + }); + + expect(aggType.getResponseAggs).toBe(testConfig); + expect(aggType.getResponseAggs).toBe(testConfig); + }); + + it('defaults to noop', () => { + const aggConfig = {} as AggConfig; + const aggType = new AggType({ + name: 'name', + title: 'title', + }); + const responseAggs = aggType.getRequestAggs(aggConfig); + + expect(responseAggs).toBe(undefined); + }); + }); + + describe('params', () => { + it('defaults to AggParams object with JSON param', () => { + const aggType = new AggType({ + name: 'smart agg', + title: 'title', + }); + + expect(Array.isArray(aggType.params)).toBeTruthy(); + expect(aggType.params.length).toBe(2); + expect(aggType.params[0].name).toBe('json'); + expect(aggType.params[1].name).toBe('customLabel'); + }); + + it('can disable customLabel', () => { + const aggType = new AggType({ + name: 'smart agg', + title: 'title', + customLabels: false, + }); + + expect(aggType.params.length).toBe(1); + expect(aggType.params[0].name).toBe('json'); + }); + + it('passes the params arg directly to the AggParams constructor', () => { + const params = [{ name: 'one' }, { name: 'two' }]; + const paramLength = params.length + 2; // json and custom label are always appended + + const aggType = new 
AggType({ + name: 'bucketeer', + title: 'title', + params, + }); + + expect(Array.isArray(aggType.params)).toBeTruthy(); + expect(aggType.params.length).toBe(paramLength); + }); + }); + }); + + describe('getFormat', function() { + let aggConfig: AggConfig; + let field: any; + + beforeEach(() => { + aggConfig = ({ + getField: jest.fn(() => field), + } as unknown) as AggConfig; + }); + + it('returns the formatter for the aggConfig', () => { + const aggType = new AggType({ + name: 'name', + title: 'title', + }); + + field = { + format: 'format', + }; + + expect(aggType.getFormat(aggConfig)).toBe('format'); + }); + + it('returns default formatter', () => { + const aggType = new AggType({ + name: 'name', + title: 'title', + }); + + field = undefined; + + expect(aggType.getFormat(aggConfig)).toBe('default'); + }); + }); + }); +}); diff --git a/src/legacy/ui/public/agg_types/buckets/histogram.ts b/src/legacy/ui/public/agg_types/buckets/histogram.ts index 516f17be0643e..23edefc67d506 100644 --- a/src/legacy/ui/public/agg_types/buckets/histogram.ts +++ b/src/legacy/ui/public/agg_types/buckets/histogram.ts @@ -92,7 +92,7 @@ export const histogramBucketAgg = new BucketAggType({ modifyAggConfigOnSearchRequestStart( aggConfig: IBucketHistogramAggConfig, searchSource: any, - searchRequest: any + options: any ) { const field = aggConfig.getField(); const aggBody = field.scripted @@ -111,10 +111,8 @@ export const histogramBucketAgg = new BucketAggType({ }, }); - searchRequest.whenAborted(() => childSearchSource.cancelQueued()); - return childSearchSource - .fetch() + .fetch(options) .then((resp: any) => { aggConfig.setAutoBounds({ min: _.get(resp, 'aggregations.minAgg.value'), diff --git a/src/legacy/ui/public/agg_types/buckets/terms.ts b/src/legacy/ui/public/agg_types/buckets/terms.ts index ad470c8f64b84..bc6dd4860561e 100644 --- a/src/legacy/ui/public/agg_types/buckets/terms.ts +++ b/src/legacy/ui/public/agg_types/buckets/terms.ts @@ -111,9 +111,6 @@ export const termsBucketAgg = new BucketAggType({ if (aggConfig.params.otherBucket) { const filterAgg = buildOtherBucketAgg(aggConfigs, aggConfig, resp); if (!filterAgg) return resp; - if (abortSignal) { - abortSignal.addEventListener('abort', () => nestedSearchSource.cancelQueued()); - } nestedSearchSource.setField('aggs', filterAgg); @@ -134,7 +131,7 @@ export const termsBucketAgg = new BucketAggType({ }); request.stats(getRequestInspectorStats(nestedSearchSource)); - const response = await nestedSearchSource.fetch(); + const response = await nestedSearchSource.fetch({ abortSignal }); request.stats(getResponseInspectorStats(nestedSearchSource, response)).ok({ json: response }); resp = mergeOtherBucketAggResponse(aggConfigs, resp, response, aggConfig, filterAgg()); } diff --git a/src/legacy/ui/public/agg_types/__tests__/param_types/index.js b/src/legacy/ui/public/agg_types/index.test.ts similarity index 53% rename from src/legacy/ui/public/agg_types/__tests__/param_types/index.js rename to src/legacy/ui/public/agg_types/index.test.ts index 507df89960c6f..a867769a77fc1 100644 --- a/src/legacy/ui/public/agg_types/__tests__/param_types/index.js +++ b/src/legacy/ui/public/agg_types/index.test.ts @@ -17,9 +17,30 @@ * under the License. 
*/ -import './_field'; -import './_optioned'; -import './_string'; -import './_json'; -describe('ParamTypes', function () { +import { aggTypes } from './index'; + +import { isBucketAggType } from './buckets/_bucket_agg_type'; +import { isMetricAggType } from './metrics/metric_agg_type'; + +const bucketAggs = aggTypes.buckets; +const metricAggs = aggTypes.metrics; + +jest.mock('ui/new_platform'); + +describe('AggTypesComponent', () => { + describe('bucket aggs', () => { + it('all extend BucketAggType', () => { + bucketAggs.forEach(bucketAgg => { + expect(isBucketAggType(bucketAgg)).toBeTruthy(); + }); + }); + }); + + describe('metric aggs', () => { + it('all extend MetricAggType', () => { + metricAggs.forEach(metricAgg => { + expect(isMetricAggType(metricAgg)).toBeTruthy(); + }); + }); + }); }); diff --git a/src/legacy/ui/public/agg_types/param_types/base.ts b/src/legacy/ui/public/agg_types/param_types/base.ts index 88fc24eeb53f5..bc8ed5d485bd4 100644 --- a/src/legacy/ui/public/agg_types/param_types/base.ts +++ b/src/legacy/ui/public/agg_types/param_types/base.ts @@ -46,18 +46,17 @@ export class BaseParamType implements AggParam { /** * A function that will be called before an aggConfig is serialized and sent to ES. - * Allows aggConfig to retrieve values needed for serialization by creating a {SearchRequest} + * Allows aggConfig to retrieve values needed for serialization * Example usage: an aggregation needs to know the min/max of a field to determine an appropriate interval * - * @param {AggConfig} aggconfig + * @param {AggConfig} aggConfig * @param {Courier.SearchSource} searchSource - * @param {Courier.SearchRequest} searchRequest * @returns {Promise|undefined} */ modifyAggConfigOnSearchRequestStart: ( - aggconfig: AggConfig, + aggConfig: AggConfig, searchSource?: SearchSource, - searchRequest?: any + options?: any ) => void; constructor(config: Record) { diff --git a/src/legacy/ui/public/agg_types/__tests__/param_types/_field.js b/src/legacy/ui/public/agg_types/param_types/field.test.ts similarity index 51% rename from src/legacy/ui/public/agg_types/__tests__/param_types/_field.js rename to src/legacy/ui/public/agg_types/param_types/field.test.ts index 94a976f98e984..2434f95056b78 100644 --- a/src/legacy/ui/public/agg_types/__tests__/param_types/_field.js +++ b/src/legacy/ui/public/agg_types/param_types/field.test.ts @@ -17,56 +17,74 @@ * under the License. 
*/ -import expect from '@kbn/expect'; -import { reject } from 'lodash'; -import ngMock from 'ng_mock'; -import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; -import { BaseParamType } from '../../param_types/base'; -import { FieldParamType } from '../../param_types/field'; +import { BaseParamType } from './base'; +import { FieldParamType } from './field'; +import { ES_FIELD_TYPES, KBN_FIELD_TYPES } from '../../../../../plugins/data/common'; -describe('Field', function () { +jest.mock('ui/new_platform'); - let indexPattern; +describe('Field', () => { + const indexPattern = { + id: '1234', + title: 'logstash-*', + fields: [ + { + name: 'field1', + type: KBN_FIELD_TYPES.NUMBER, + esTypes: [ES_FIELD_TYPES.INTEGER], + aggregatable: true, + filterable: true, + searchable: true, + }, + { + name: 'field2', + type: KBN_FIELD_TYPES.STRING, + esTypes: [ES_FIELD_TYPES.TEXT], + aggregatable: false, + filterable: false, + searchable: true, + }, + ], + } as any; - beforeEach(ngMock.module('kibana')); - // fetch out deps - beforeEach(ngMock.inject(function (Private) { - indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); - })); - - describe('constructor', function () { - it('it is an instance of BaseParamType', function () { + describe('constructor', () => { + it('it is an instance of BaseParamType', () => { const aggParam = new FieldParamType({ - name: 'field', type: 'field' + name: 'field', + type: 'field', }); - expect(aggParam).to.be.a(BaseParamType); + expect(aggParam instanceof BaseParamType).toBeTruthy(); }); }); - describe('getAvailableFields', function () { - it('should return only aggregatable fields by default', function () { + describe('getAvailableFields', () => { + it('should return only aggregatable fields by default', () => { const aggParam = new FieldParamType({ - name: 'field', type: 'field' + name: 'field', + type: 'field', }); const fields = aggParam.getAvailableFields(indexPattern.fields); - expect(fields).to.not.have.length(0); + + expect(fields.length).toBe(1); + for (const field of fields) { - expect(field.aggregatable).to.be(true); + expect(field.aggregatable).toBe(true); } }); - it('should return all fields if onlyAggregatable is false', function () { + it('should return all fields if onlyAggregatable is false', () => { const aggParam = new FieldParamType({ - name: 'field', type: 'field' + name: 'field', + type: 'field', }); aggParam.onlyAggregatable = false; const fields = aggParam.getAvailableFields(indexPattern.fields); - const nonAggregatableFields = reject(fields, 'aggregatable'); - expect(nonAggregatableFields).to.not.be.empty(); + + expect(fields.length).toBe(2); }); }); }); diff --git a/src/legacy/ui/public/agg_types/param_types/json.test.ts b/src/legacy/ui/public/agg_types/param_types/json.test.ts new file mode 100644 index 0000000000000..fb31385505a76 --- /dev/null +++ b/src/legacy/ui/public/agg_types/param_types/json.test.ts @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { BaseParamType } from './base'; +import { JsonParamType } from './json'; +import { AggConfig } from 'ui/agg_types'; + +jest.mock('ui/new_platform'); + +describe('JSON', function() { + const paramName = 'json_test'; + let aggConfig: AggConfig; + let output: Record; + + function initAggParam(config: Record = {}) { + return new JsonParamType({ + ...config, + type: 'json', + name: paramName, + }); + } + + beforeEach(function() { + aggConfig = { params: {} } as AggConfig; + output = { params: {} }; + }); + + describe('constructor', () => { + it('it is an instance of BaseParamType', () => { + const aggParam = initAggParam(); + + expect(aggParam instanceof BaseParamType).toBeTruthy(); + }); + }); + + describe('write', () => { + it('should do nothing when param is not defined', () => { + const aggParam = initAggParam(); + + expect(aggConfig.params).not.toHaveProperty(paramName); + + aggParam.write(aggConfig, output); + expect(output).not.toHaveProperty(paramName); + }); + + it('should not append param when invalid JSON', () => { + const aggParam = initAggParam(); + + aggConfig.params[paramName] = 'i am not json'; + + aggParam.write(aggConfig, output); + expect(aggConfig.params).toHaveProperty(paramName); + expect(output).not.toHaveProperty(paramName); + }); + + it('should append param when valid JSON', () => { + const aggParam = initAggParam(); + const jsonData = JSON.stringify({ + new_param: 'should exist in output', + }); + + output.params.existing = 'true'; + aggConfig.params[paramName] = jsonData; + + aggParam.write(aggConfig, output); + expect(aggConfig.params).toHaveProperty(paramName); + + expect(output.params).toEqual({ + existing: 'true', + new_param: 'should exist in output', + }); + }); + + it('should not overwrite existing params', () => { + const aggParam = initAggParam(); + const jsonData = JSON.stringify({ + new_param: 'should exist in output', + existing: 'should be used', + }); + + output.params.existing = 'true'; + aggConfig.params[paramName] = jsonData; + + aggParam.write(aggConfig, output); + expect(output.params).toEqual(JSON.parse(jsonData)); + }); + + it('should drop nulled params', () => { + const aggParam = initAggParam(); + const jsonData = JSON.stringify({ + new_param: 'should exist in output', + field: null, + }); + + output.params.field = 'extensions'; + aggConfig.params[paramName] = jsonData; + + aggParam.write(aggConfig, output); + expect(Object.keys(output.params)).toContain('new_param'); + expect(Object.keys(output.params)).not.toContain('field'); + }); + }); +}); diff --git a/src/legacy/ui/public/agg_types/__tests__/param_types/_optioned.js b/src/legacy/ui/public/agg_types/param_types/optioned.test.ts similarity index 71% rename from src/legacy/ui/public/agg_types/__tests__/param_types/_optioned.js rename to src/legacy/ui/public/agg_types/param_types/optioned.test.ts index 4e66f6cfbd41b..6b58d81914097 100644 --- a/src/legacy/ui/public/agg_types/__tests__/param_types/_optioned.js +++ b/src/legacy/ui/public/agg_types/param_types/optioned.test.ts @@ -17,20 +17,20 @@ * under the License. 
*/ -import expect from '@kbn/expect'; -import { BaseParamType } from '../../param_types/base'; -import { OptionedParamType } from '../../param_types/optioned'; +import { BaseParamType } from './base'; +import { OptionedParamType } from './optioned'; -describe('Optioned', function () { +jest.mock('ui/new_platform'); - describe('constructor', function () { - it('it is an instance of BaseParamType', function () { +describe('Optioned', () => { + describe('constructor', () => { + it('it is an instance of BaseParamType', () => { const aggParam = new OptionedParamType({ name: 'some_param', - type: 'optioned' + type: 'optioned', }); - expect(aggParam).to.be.a(BaseParamType); + expect(aggParam instanceof BaseParamType).toBeTruthy(); }); }); }); diff --git a/src/legacy/ui/public/agg_types/param_types/string.test.ts b/src/legacy/ui/public/agg_types/param_types/string.test.ts new file mode 100644 index 0000000000000..3d496ecf898e4 --- /dev/null +++ b/src/legacy/ui/public/agg_types/param_types/string.test.ts @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { BaseParamType } from './base'; +import { StringParamType } from './string'; +import { AggConfig } from 'ui/agg_types'; + +jest.mock('ui/new_platform'); + +describe('String', function() { + let paramName = 'json_test'; + let aggConfig: AggConfig; + let output: Record; + + function initAggParam(config: Record = {}) { + return new StringParamType({ + ...config, + type: 'string', + name: paramName, + }); + } + + beforeEach(() => { + aggConfig = { params: {} } as AggConfig; + output = { params: {} }; + }); + + describe('constructor', () => { + it('it is an instance of BaseParamType', () => { + const aggParam = initAggParam(); + + expect(aggParam instanceof BaseParamType).toBeTruthy(); + }); + }); + + describe('write', () => { + it('should append param by name', () => { + const params = { + [paramName]: 'some input', + }; + + const aggParam = initAggParam({ name: paramName }); + + aggConfig.params = params; + aggParam.write(aggConfig, output); + + expect(output.params).toEqual(params); + }); + + it('should not be in output with empty input', () => { + paramName = 'more_testing'; + + const params = { + [paramName]: '', + }; + + const aggParam = initAggParam({ name: paramName }); + + aggConfig.params = params; + aggParam.write(aggConfig, output); + + expect(output.params).toEqual({}); + }); + }); +}); diff --git a/src/legacy/ui/public/agg_types/__tests__/utils.test.tsx b/src/legacy/ui/public/agg_types/utils.test.tsx similarity index 81% rename from src/legacy/ui/public/agg_types/__tests__/utils.test.tsx rename to src/legacy/ui/public/agg_types/utils.test.tsx index 655b606bb46b0..a3c7f24f3927d 100644 --- a/src/legacy/ui/public/agg_types/__tests__/utils.test.tsx +++ b/src/legacy/ui/public/agg_types/utils.test.tsx @@ -17,7 +17,7 @@ * under the License. */ -import { isValidJson } from '../utils'; +import { isValidJson } from './utils'; jest.mock('ui/new_platform'); @@ -29,23 +29,23 @@ const input = { describe('AggType utils', () => { describe('isValidJson', () => { it('should return true when empty string', () => { - expect(isValidJson('')).toBe(true); + expect(isValidJson('')).toBeTruthy(); }); it('should return true when undefine', () => { - expect(isValidJson(undefined as any)).toBe(true); + expect(isValidJson(undefined as any)).toBeTruthy(); }); it('should return false when invalid string', () => { - expect(isValidJson(input.invalid)).toBe(false); + expect(isValidJson(input.invalid)).toBeFalsy(); }); it('should return true when valid string', () => { - expect(isValidJson(input.valid)).toBe(true); + expect(isValidJson(input.valid)).toBeTruthy(); }); it('should return false if a number', () => { - expect(isValidJson('0')).toBe(false); + expect(isValidJson('0')).toBeFalsy(); }); }); }); diff --git a/src/legacy/ui/public/autoload/modules.js b/src/legacy/ui/public/autoload/modules.js index 5594de8cf67cd..d662d479fc86b 100644 --- a/src/legacy/ui/public/autoload/modules.js +++ b/src/legacy/ui/public/autoload/modules.js @@ -20,7 +20,6 @@ import 'angular'; import '../chrome'; import '../config'; -import '../courier'; import '../es'; import '../notify'; import '../private'; diff --git a/src/legacy/ui/public/chrome/directives/kbn_chrome.html b/src/legacy/ui/public/chrome/directives/kbn_chrome.html index ced89287d310f..541082e68de58 100644 --- a/src/legacy/ui/public/chrome/directives/kbn_chrome.html +++ b/src/legacy/ui/public/chrome/directives/kbn_chrome.html @@ -1,9 +1,9 @@
-
+ >
diff --git a/src/legacy/ui/public/courier/courier.js b/src/legacy/ui/public/courier/courier.js deleted file mode 100644 index a317932e51118..0000000000000 --- a/src/legacy/ui/public/courier/courier.js +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import _ from 'lodash'; - -import { timefilter } from 'ui/timefilter'; - -import '../es'; -import '../directives/listen'; -import { uiModules } from '../modules'; -import { addFatalErrorCallback } from '../notify'; -import '../promises'; - -import { searchRequestQueue } from './search_request_queue'; -import { FetchSoonProvider } from './fetch'; -import { SearchPollProvider } from './search_poll'; - -uiModules.get('kibana/courier').service('courier', ($rootScope, Private) => { - const fetchSoon = Private(FetchSoonProvider); - - // This manages the doc fetch interval. - const searchPoll = Private(SearchPollProvider); - - class Courier { - constructor() { - // Listen for refreshInterval changes - const updateRefreshInterval = () => { - const refreshIntervalMs = _.get(timefilter.getRefreshInterval(), 'value'); - const isRefreshPaused = _.get(timefilter.getRefreshInterval(), 'pause'); - - // Update the time between automatic search requests. - searchPoll.setIntervalInMs(refreshIntervalMs); - - if (isRefreshPaused) { - searchPoll.pause(); - } else { - searchPoll.resume(); - } - }; - - const refreshIntervalSubscription = timefilter.getRefreshIntervalUpdate$().subscribe(updateRefreshInterval); - - const closeOnFatal = _.once(() => { - // If there was a fatal error, then stop future searches. We want to use pause instead of - // clearTimer because if the search results come back after the fatal error then we'll - // resume polling. - searchPoll.pause(); - - // And abort all pending requests. - searchRequestQueue.abortAll(); - - if (searchRequestQueue.getCount()) { - throw new Error('Aborting all pending requests failed.'); - } - - refreshIntervalSubscription.unsubscribe(); - }); - - addFatalErrorCallback(closeOnFatal); - updateRefreshInterval(); - } - - /** - * Fetch the pending requests. - */ - fetch() { - fetchSoon.fetchQueued().then(() => { - // Reset the timer using the time that we get this response as the starting point. - searchPoll.resetTimer(); - }); - } - } - - return new Courier(); -}); diff --git a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js b/src/legacy/ui/public/courier/fetch/__tests__/call_client.js deleted file mode 100644 index 1a473446df872..0000000000000 --- a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js +++ /dev/null @@ -1,349 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import NoDigestPromises from 'test_utils/no_digest_promises'; -import { delay } from 'bluebird'; - -import { CallClientProvider } from '../call_client'; -import { RequestStatus } from '../req_status'; -import { SearchRequestProvider } from '../request'; -import { addSearchStrategy } from '../../search_strategy'; - -describe('callClient', () => { - NoDigestPromises.activateForSuite(); - - const ABORTED = RequestStatus.ABORTED; - - let SearchRequest; - let callClient; - let fakeSearch; - let searchRequests; - let esRequestDelay; - let esShouldError; - let esPromiseAbortSpy; - - const createSearchRequest = (id, overrides = {}, errorHandler = () => {}) => { - const { source: overrideSource, ...rest } = overrides; - - const source = { - _flatten: () => Promise.resolve({ - index: id - }), - requestIsStopped: () => {}, - getField: () => 'indexPattern', - getPreferredSearchStrategyId: () => undefined, - ...overrideSource - }; - - const searchRequest = new SearchRequest({ source, errorHandler, ...rest }); - searchRequest.__testId__ = id; - return searchRequest; - }; - - beforeEach(ngMock.module('kibana')); - - beforeEach(ngMock.module(function stubEs($provide) { - esRequestDelay = 0; - esShouldError = false; - - $provide.service('es', (Promise) => { - fakeSearch = sinon.spy(({ index }) => { - const esPromise = new Promise((resolve, reject) => { - if (esShouldError) { - return reject('fake es error'); - } - - setTimeout(() => { - resolve(index); - }, esRequestDelay); - }); - - esPromise.abort = esPromiseAbortSpy = sinon.spy(); - return esPromise; - }); - - return { - search: fakeSearch - }; - }); - })); - - beforeEach(ngMock.inject(Private => { - callClient = Private(CallClientProvider); - SearchRequest = Private(SearchRequestProvider); - })); - - describe('basic contract', () => { - it('returns a promise', () => { - searchRequests = [ createSearchRequest() ]; - const callingClient = callClient(searchRequests); - expect(callingClient.then).to.be.a('function'); - }); - - it(`resolves the promise with the 'responses' property of the es.search() result`, () => { - searchRequests = [ createSearchRequest(1) ]; - - return callClient(searchRequests).then(results => { - expect(results).to.eql([1]); - }); - }); - - describe('for failing requests', () => { - beforeEach(() => { - addSearchStrategy({ - id: 'fail', - isViable: indexPattern => { - return indexPattern.type === 'fail'; - }, - search: () => { - return { - searching: Promise.reject(new Error('Search failed')), - failedSearchRequests: [], - abort: () => {}, - }; - }, - }); - }); - - it(`still bubbles up the failure`, () => { - const searchRequestFail1 = createSearchRequest('fail1', { - source: { - getField: () => ({ type: 'fail' }), - }, - }); - - 
const searchRequestFail2 = createSearchRequest('fail2', { - source: { - getField: () => ({ type: 'fail' }), - }, - }); - - searchRequests = [ searchRequestFail1, searchRequestFail2 ]; - - return callClient(searchRequests).then(results => { - expect(results).to.eql([ - { error: new Error('Search failed') }, - { error: new Error('Search failed') }, - ]); - }); - }); - }); - }); - - describe('implementation', () => { - it('calls searchRequest.whenAborted() as part of setup', async () => { - const whenAbortedSpy = sinon.spy(); - const searchRequest = createSearchRequest(); - searchRequest.whenAborted = whenAbortedSpy; - searchRequests = [ searchRequest ]; - - return callClient(searchRequests).then(() => { - expect(whenAbortedSpy.callCount).to.be(1); - }); - }); - }); - - describe('aborting at different points in the request lifecycle:', () => { - it('while the search body is being formed rejects with an AbortError', () => { - const searchRequest = createSearchRequest(1, { - source: { - _flatten: () => { - return new Promise(resolve => { - setTimeout(() => { - resolve({}); - }, 100); - }); - }, - requestIsStopped: () => {}, - }, - }); - - searchRequests = [ searchRequest ]; - const callingClient = callClient(searchRequests); - - // Abort the request while the search body is being formed. - setTimeout(() => { - searchRequest.abort(); - }, 20); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - - it('while the search is in flight rejects with an AbortError', () => { - esRequestDelay = 100; - - const searchRequest = createSearchRequest(); - searchRequests = [ searchRequest ]; - const callingClient = callClient(searchRequests); - - // Abort the request while the search is in flight.. - setTimeout(() => { - searchRequest.abort(); - }, 80); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - }); - - describe('aborting number of requests:', () => { - it(`aborting all searchRequests rejects with an AbortError`, () => { - const searchRequest1 = createSearchRequest(); - const searchRequest2 = createSearchRequest(); - searchRequests = [ searchRequest1, searchRequest2 ]; - const callingClient = callClient(searchRequests); - - searchRequest1.abort(); - searchRequest2.abort(); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - - it(`aborting all searchRequests calls abort() on the promise returned by searchStrategy.search()`, () => { - esRequestDelay = 100; - - const searchRequest1 = createSearchRequest(); - const searchRequest2 = createSearchRequest(); - searchRequests = [ searchRequest1, searchRequest2 ]; - - const callingClient = callClient(searchRequests); - - return Promise.all([ - delay(70).then(() => { - // At this point we expect the request to be in flight. 
- expect(esPromiseAbortSpy.callCount).to.be(0); - searchRequest1.abort(); - searchRequest2.abort(); - }), - callingClient.catch(() => { - expect(esPromiseAbortSpy.callCount).to.be(1); - }), - ]); - }); - - it('aborting some searchRequests rejects with an AbortError', () => { - const searchRequest1 = createSearchRequest(1); - const searchRequest2 = createSearchRequest(2); - searchRequests = [ searchRequest1, searchRequest2 ]; - const callingClient = callClient(searchRequests); - searchRequest2.abort(); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - }); - - describe('searchRequests with multiple searchStrategies map correctly to their responses', () => { - const search = ({ searchRequests }) => { - return { - searching: Promise.resolve(searchRequests.map(searchRequest => searchRequest.__testId__)), - failedSearchRequests: [], - abort: () => {}, - }; - }; - - const searchStrategyA = { - id: 'a', - isViable: indexPattern => { - return indexPattern.type === 'a'; - }, - search, - }; - - const searchStrategyB = { - id: 'b', - isViable: indexPattern => { - return indexPattern.type === 'b'; - }, - search, - }; - - let searchRequestA; - let searchRequestB; - let searchRequestA2; - - beforeEach(() => { - addSearchStrategy(searchStrategyA); - addSearchStrategy(searchStrategyB); - - searchRequestA = createSearchRequest('a', { - source: { - getField: () => ({ type: 'a' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - - searchRequestB = createSearchRequest('b', { - source: { - getField: () => ({ type: 'b' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - - searchRequestA2 = createSearchRequest('a2', { - source: { - getField: () => ({ type: 'a' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - }); - - it('if the searchRequests are reordered by the searchStrategies', () => { - // Add requests in an order which will be reordered by the strategies. - searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', 'a2']); - }); - }); - - it('if one is aborted after being provided', () => { - // Add requests in an order which will be reordered by the strategies. - searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - searchRequestA2.abort(); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', ABORTED]); - }); - }); - - it(`if one is already aborted when it's provided`, () => { - searchRequests = [ searchRequestA, searchRequestB, ABORTED, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', ABORTED, 'a2']); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js b/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js deleted file mode 100644 index 19032ce1f4ca3..0000000000000 --- a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; - -import { CallClientProvider } from '../call_client'; -import { CallResponseHandlersProvider } from '../call_response_handlers'; -import { ContinueIncompleteProvider } from '../continue_incomplete'; -import { FetchNowProvider } from '../fetch_now'; - -function mockRequest() { - return { - strategy: 'mock', - started: true, - aborted: false, - handleFailure: sinon.spy(), - retry: sinon.spy(function () { return this; }), - continue: sinon.spy(function () { return this; }), - start: sinon.spy(function () { return this; }) - }; -} - -describe('FetchNowProvider', () => { - - let Promise; - let $rootScope; - let fetchNow; - let request; - let requests; - let fakeResponses; - - beforeEach(ngMock.module('kibana', (PrivateProvider) => { - function FakeResponsesProvider(Promise) { - fakeResponses = sinon.spy(function () { - return Promise.map(requests, mockRequest => { - return { mockRequest }; - }); - }); - return fakeResponses; - } - - PrivateProvider.swap(CallClientProvider, FakeResponsesProvider); - PrivateProvider.swap(CallResponseHandlersProvider, FakeResponsesProvider); - PrivateProvider.swap(ContinueIncompleteProvider, FakeResponsesProvider); - })); - - beforeEach(ngMock.inject((Private, $injector) => { - $rootScope = $injector.get('$rootScope'); - Promise = $injector.get('Promise'); - fetchNow = Private(FetchNowProvider); - request = mockRequest(); - requests = [ request ]; - })); - - describe('when request has not started', () => { - beforeEach(() => requests.forEach(req => req.started = false)); - - it('starts request', () => { - fetchNow(requests); - expect(request.start.called).to.be(true); - expect(request.continue.called).to.be(false); - }); - - it('waits for returned promise from start() to be fulfilled', () => { - request.start = sinon.stub().returns(Promise.resolve(request)); - fetchNow(requests); - - expect(request.start.callCount).to.be(1); - expect(fakeResponses.callCount).to.be(0); - $rootScope.$apply(); - expect(fakeResponses.callCount).to.be(3); - }); - - it('invokes request failure handler if starting fails', () => { - request.start = sinon.stub().returns(Promise.reject('some error')); - fetchNow(requests); - $rootScope.$apply(); - sinon.assert.calledWith(request.handleFailure, 'some error'); - }); - }); - - describe('when request has already started', () => { - it('continues request', () => { - fetchNow(requests); - expect(request.start.called).to.be(false); - expect(request.continue.called).to.be(true); - }); - it('waits for returned promise to be fulfilled', () => { - request.continue = sinon.stub().returns(Promise.resolve(request)); - fetchNow(requests); - - expect(request.continue.callCount).to.be(1); - expect(fakeResponses.callCount).to.be(0); - $rootScope.$apply(); - expect(fakeResponses.callCount).to.be(3); - }); - it('invokes request failure handler if continuing fails', () => { - 
request.continue = sinon.stub().returns(Promise.reject('some error')); - fetchNow(requests); - $rootScope.$apply(); - sinon.assert.calledWith(request.handleFailure, 'some error'); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/call_client.js b/src/legacy/ui/public/courier/fetch/call_client.js index 7ba73e741c074..971ae4c49a604 100644 --- a/src/legacy/ui/public/courier/fetch/call_client.js +++ b/src/legacy/ui/public/courier/fetch/call_client.js @@ -17,187 +17,37 @@ * under the License. */ -import { ErrorAllowExplicitIndexProvider } from '../../error_allow_explicit_index'; -import { assignSearchRequestsToSearchStrategies } from '../search_strategy'; -import { IsRequestProvider } from './is_request'; -import { RequestStatus } from './req_status'; -import { SerializeFetchParamsProvider } from './request/serialize_fetch_params'; -import { i18n } from '@kbn/i18n'; -import { createDefer } from 'ui/promises'; - -export function CallClientProvider(Private, Promise, es, config, sessionId, esShardTimeout) { - const errorAllowExplicitIndex = Private(ErrorAllowExplicitIndexProvider); - const isRequest = Private(IsRequestProvider); - const serializeFetchParams = Private(SerializeFetchParamsProvider); - - const ABORTED = RequestStatus.ABORTED; - - function callClient(searchRequests) { - // get the actual list of requests that we will be fetching - const requestsToFetch = searchRequests.filter(isRequest); - let requestsToFetchCount = requestsToFetch.length; - - if (requestsToFetchCount === 0) { - return Promise.resolve([]); - } - - // This is how we'll provide the consumer with search responses. Resolved by - // respondToSearchRequests. - const defer = createDefer(Promise); - - const abortableSearches = []; - let areAllSearchRequestsAborted = false; - - // When we traverse our search requests and send out searches, some of them may fail. We'll - // store those that don't fail here. - const activeSearchRequests = []; - - // Respond to each searchRequest with the response or ABORTED. - const respondToSearchRequests = (responsesInOriginalRequestOrder = []) => { - // We map over searchRequests because if we were originally provided an ABORTED - // request then we'll return that value. - return Promise.map(searchRequests, function (searchRequest, searchRequestIndex) { - if (searchRequest.aborted) { - return ABORTED; - } - - const status = searchRequests[searchRequestIndex]; - - if (status === ABORTED) { - return ABORTED; - } - - const activeSearchRequestIndex = activeSearchRequests.indexOf(searchRequest); - const isFailedSearchRequest = activeSearchRequestIndex === -1; - - if (isFailedSearchRequest) { - return ABORTED; - } - - return responsesInOriginalRequestOrder[searchRequestIndex]; - }) - .then( - (res) => defer.resolve(res), - (err) => defer.reject(err) - ); - }; - - // handle a request being aborted while being fetched - const requestWasAborted = Promise.method(function (searchRequest, index) { - if (searchRequests[index] === ABORTED) { - defer.reject(new Error( - i18n.translate('common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage', { - defaultMessage: 'Request was aborted twice?', - }) - )); - } - - requestsToFetchCount--; - - if (requestsToFetchCount !== 0) { - // We can't resolve early unless all searchRequests have been aborted. 
- return; - } - - abortableSearches.forEach(({ abort }) => { - abort(); - }); - - areAllSearchRequestsAborted = true; - - return respondToSearchRequests(); - }); - - // attach abort handlers, close over request index - searchRequests.forEach(function (searchRequest, index) { - if (!isRequest(searchRequest)) { - return; - } - - searchRequest.whenAborted(function () { - requestWasAborted(searchRequest, index).catch(defer.reject); - }); - }); - - const searchStrategiesWithRequests = assignSearchRequestsToSearchStrategies(requestsToFetch); - - // We're going to create a new async context here, so that the logic within it can execute - // asynchronously after we've returned a reference to defer.promise. - Promise.resolve().then(async () => { - // Execute each request using its search strategy. - for (let i = 0; i < searchStrategiesWithRequests.length; i++) { - const searchStrategyWithSearchRequests = searchStrategiesWithRequests[i]; - const { searchStrategy, searchRequests } = searchStrategyWithSearchRequests; - const { - searching, - abort, - failedSearchRequests, - } = await searchStrategy.search({ searchRequests, es, Promise, serializeFetchParams, config, sessionId, esShardTimeout }); - - // Collect searchRequests which have successfully been sent. - searchRequests.forEach(searchRequest => { - if (failedSearchRequests.includes(searchRequest)) { - return; - } - - activeSearchRequests.push(searchRequest); - }); - - abortableSearches.push({ - searching, - abort, - requestsCount: searchRequests.length, - }); - } - - try { - // The request was aborted while we were doing the above logic. - if (areAllSearchRequestsAborted) { - return; - } - - const segregatedResponses = await Promise.all(abortableSearches.map(async ({ searching, requestsCount }) => { - return searching.catch((e) => { - // Duplicate errors so that they correspond to the original requests. - return new Array(requestsCount).fill({ error: e }); - }); - })); - - // Assigning searchRequests to strategies means that the responses come back in a different - // order than the original searchRequests. So we'll put them back in order so that we can - // use the order to associate each response with the original request. 
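Editor's note, illustration only and not part of the patch: the replacement callClient further down drops this manual bookkeeping in favour of grouping requests by search strategy and correlating each request with its response through a Map, so callers always get responses back in their original order. A standalone sketch of that pattern, with a made-up strategies table and request shape:

// One fake strategy; the real code resolves strategies per request.
const strategies = {
  default: {
    search: requests => Promise.resolve(requests.map(r => ({ id: r.id }))),
  },
};

function searchInOriginalOrder(requests, getStrategyId = () => 'default') {
  // Group the requests by the strategy that will execute them.
  const groups = new Map();
  requests.forEach(request => {
    const strategyId = getStrategyId(request);
    if (!groups.has(strategyId)) groups.set(strategyId, []);
    groups.get(strategyId).push(request);
  });

  // One search per group; remember which response belongs to which request.
  const responseByRequest = new Map();
  groups.forEach((group, strategyId) => {
    const searching = strategies[strategyId].search(group);
    group.forEach((request, i) => {
      responseByRequest.set(request, searching.then(results => results[i]));
    });
  });

  // Same order as the input array, regardless of how requests were grouped.
  return requests.map(request => responseByRequest.get(request));
}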
- const responsesInOriginalRequestOrder = new Array(searchRequests.length); - segregatedResponses.forEach((responses, strategyIndex) => { - responses.forEach((response, responseIndex) => { - const searchRequest = searchStrategiesWithRequests[strategyIndex].searchRequests[responseIndex]; - const requestIndex = searchRequests.indexOf(searchRequest); - responsesInOriginalRequestOrder[requestIndex] = response; - }); - }); - - await respondToSearchRequests(responsesInOriginalRequestOrder); - } catch(error) { - if (errorAllowExplicitIndex.test(error)) { - return errorAllowExplicitIndex.takeover(); - } - - defer.reject(error); - } +import { groupBy } from 'lodash'; +import { getSearchStrategyForSearchRequest, getSearchStrategyById } from '../search_strategy'; +import { handleResponse } from './handle_response'; + +export function callClient(searchRequests, requestsOptions = [], { es, config, esShardTimeout } = {}) { + // Correlate the options with the request that they're associated with + const requestOptionEntries = searchRequests.map((request, i) => [request, requestsOptions[i]]); + const requestOptionsMap = new Map(requestOptionEntries); + + // Group the requests by the strategy used to search that specific request + const searchStrategyMap = groupBy(searchRequests, (request, i) => { + const searchStrategy = getSearchStrategyForSearchRequest(request, requestsOptions[i]); + return searchStrategy.id; + }); + + // Execute each search strategy with the group of requests, but return the responses in the same + // order in which they were received. We use a map to correlate the original request with its + // response. + const requestResponseMap = new Map(); + Object.keys(searchStrategyMap).forEach(searchStrategyId => { + const searchStrategy = getSearchStrategyById(searchStrategyId); + const requests = searchStrategyMap[searchStrategyId]; + const { searching, abort } = searchStrategy.search({ searchRequests: requests, es, config, esShardTimeout }); + requests.forEach((request, i) => { + const response = searching.then(results => handleResponse(request, results[i])); + const { abortSignal } = requestOptionsMap.get(request) || {}; + if (abortSignal) abortSignal.addEventListener('abort', abort); + requestResponseMap.set(request, response); }); + }, []); + return searchRequests.map(request => requestResponseMap.get(request)); +} - // Return the promise which acts as our vehicle for providing search responses to the consumer. - // However, if there are any errors, notify the searchRequests of them *instead* of bubbling - // them up to the consumer. - return defer.promise.catch((err) => { - // By returning the return value of this catch() without rethrowing the error, we delegate - // error-handling to the searchRequest instead of the consumer. - searchRequests.forEach((searchRequest, index) => { - if (searchRequests[index] !== ABORTED) { - searchRequest.handleFailure(err); - } - }); - }); - } - return callClient; -} diff --git a/src/legacy/ui/public/courier/fetch/call_client.test.js b/src/legacy/ui/public/courier/fetch/call_client.test.js new file mode 100644 index 0000000000000..463d6c59e479e --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/call_client.test.js @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { callClient } from './call_client'; +import { handleResponse } from './handle_response'; + +const mockResponses = [{}, {}]; +const mockAbortFns = [jest.fn(), jest.fn()]; +const mockSearchFns = [ + jest.fn(({ searchRequests }) => ({ + searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[0])), + abort: mockAbortFns[0] + })), + jest.fn(({ searchRequests }) => ({ + searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[1])), + abort: mockAbortFns[1] + })) +]; +const mockSearchStrategies = mockSearchFns.map((search, i) => ({ search, id: i })); + +jest.mock('./handle_response', () => ({ + handleResponse: jest.fn((request, response) => response) +})); + +jest.mock('../search_strategy', () => ({ + getSearchStrategyForSearchRequest: request => mockSearchStrategies[request._searchStrategyId], + getSearchStrategyById: id => mockSearchStrategies[id] +})); + +describe('callClient', () => { + beforeEach(() => { + handleResponse.mockClear(); + mockAbortFns.forEach(fn => fn.mockClear()); + mockSearchFns.forEach(fn => fn.mockClear()); + }); + + test('Executes each search strategy with its group of matching requests', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }, { + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }]; + + callClient(searchRequests); + + expect(mockSearchFns[0]).toBeCalled(); + expect(mockSearchFns[0].mock.calls[0][0].searchRequests).toEqual([searchRequests[0], searchRequests[2]]); + expect(mockSearchFns[1]).toBeCalled(); + expect(mockSearchFns[1].mock.calls[0][0].searchRequests).toEqual([searchRequests[1], searchRequests[3]]); + }); + + test('Passes the additional arguments it is given to the search strategy', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }]; + const args = { es: {}, config: {}, esShardTimeout: 0 }; + + callClient(searchRequests, [], args); + + expect(mockSearchFns[0]).toBeCalled(); + expect(mockSearchFns[0].mock.calls[0][0]).toEqual({ searchRequests, ...args }); + }); + + test('Returns the responses in the original order', async () => { + const searchRequests = [{ + _searchStrategyId: 1 + }, { + _searchStrategyId: 0 + }]; + + const responses = await Promise.all(callClient(searchRequests)); + + expect(responses).toEqual([mockResponses[1], mockResponses[0]]); + }); + + test('Calls handleResponse with each request and response', async () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }]; + + const responses = callClient(searchRequests); + await Promise.all(responses); + + expect(handleResponse).toBeCalledTimes(2); + expect(handleResponse).toBeCalledWith(searchRequests[0], mockResponses[0]); + expect(handleResponse).toBeCalledWith(searchRequests[1], mockResponses[1]); + }); + + test('If passed an abortSignal, calls abort on the strategy if the signal is aborted', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + 
_searchStrategyId: 1 + }]; + const abortController = new AbortController(); + const requestOptions = [{ + abortSignal: abortController.signal + }]; + + callClient(searchRequests, requestOptions); + abortController.abort(); + + expect(mockAbortFns[0]).toBeCalled(); + expect(mockAbortFns[1]).not.toBeCalled(); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/call_response_handlers.js b/src/legacy/ui/public/courier/fetch/call_response_handlers.js deleted file mode 100644 index aaf82168e385f..0000000000000 --- a/src/legacy/ui/public/courier/fetch/call_response_handlers.js +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import React from 'react'; -import { i18n } from '@kbn/i18n'; -import { EuiSpacer } from '@elastic/eui'; -import { toastNotifications } from '../../notify'; -import { RequestFailure } from './errors'; -import { RequestStatus } from './req_status'; -import { SearchError } from '../search_strategy/search_error'; -import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button'; - -export function CallResponseHandlersProvider(Promise) { - const ABORTED = RequestStatus.ABORTED; - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function callResponseHandlers(searchRequests, responses) { - return Promise.map(searchRequests, function (searchRequest, index) { - if (searchRequest === ABORTED || searchRequest.aborted) { - return ABORTED; - } - - const response = responses[index]; - - if (response.timed_out) { - toastNotifications.addWarning({ - title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', { - defaultMessage: 'Data might be incomplete because your request timed out', - }), - }); - } - - if (response._shards && response._shards.failed) { - const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', { - defaultMessage: '{shardsFailed} of {shardsTotal} shards failed', - values: { - shardsFailed: response._shards.failed, - shardsTotal: response._shards.total, - }, - }); - const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', { - defaultMessage: 'The data you are seeing might be incomplete or wrong.', - }); - - const text = ( - <> - {description} - - - - ); - - toastNotifications.addWarning({ - title, - text, - }); - } - - function progress() { - if (searchRequest.isIncomplete()) { - return INCOMPLETE; - } - - searchRequest.complete(); - return response; - } - - if (response.error) { - if (searchRequest.filterError(response)) { - return progress(); - } else { - return searchRequest.handleFailure( - response.error instanceof SearchError - ? 
response.error - : new RequestFailure(null, response) - ); - } - } - - return Promise.try(() => searchRequest.handleResponse(response)).then(progress); - }); - } - - return callResponseHandlers; -} diff --git a/src/legacy/ui/public/courier/fetch/continue_incomplete.js b/src/legacy/ui/public/courier/fetch/continue_incomplete.js deleted file mode 100644 index b40ebdb886748..0000000000000 --- a/src/legacy/ui/public/courier/fetch/continue_incomplete.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { RequestStatus } from './req_status'; - -export function ContinueIncompleteProvider() { - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function continueIncompleteRequests(searchRequests, responses, fetchSearchResults) { - const incompleteSearchRequests = []; - - responses.forEach(function (response, index) { - if (response === INCOMPLETE) { - incompleteSearchRequests.push(searchRequests[index]); - } - }); - - if (!incompleteSearchRequests.length) { - return responses; - } - - return fetchSearchResults(incompleteSearchRequests) - .then(function (completedResponses) { - return responses.map(function (prevResponse) { - if (prevResponse !== INCOMPLETE) { - return prevResponse; - } - - return completedResponses.shift(); - }); - }); - } - - return continueIncompleteRequests; -} diff --git a/src/legacy/ui/public/courier/fetch/fetch_now.js b/src/legacy/ui/public/courier/fetch/fetch_now.js deleted file mode 100644 index de5704d4380f4..0000000000000 --- a/src/legacy/ui/public/courier/fetch/fetch_now.js +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { fatalError } from '../../notify'; -import { CallClientProvider } from './call_client'; -import { CallResponseHandlersProvider } from './call_response_handlers'; -import { ContinueIncompleteProvider } from './continue_incomplete'; -import { RequestStatus } from './req_status'; -import { i18n } from '@kbn/i18n'; - -/** - * Fetch now provider should be used if you want the results searched and returned immediately. - * This can be slightly inefficient if a large number of requests are queued up, we can batch these - * by using fetchSoon. This introduces a slight delay which allows other requests to queue up before - * sending out requests in a batch. - * - * @param Private - * @param Promise - * @return {fetchNow} - * @constructor - */ -export function FetchNowProvider(Private, Promise) { - // core tasks - const callClient = Private(CallClientProvider); - const callResponseHandlers = Private(CallResponseHandlersProvider); - const continueIncomplete = Private(ContinueIncompleteProvider); - - const ABORTED = RequestStatus.ABORTED; - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function fetchNow(searchRequests) { - return fetchSearchResults(searchRequests.map(function (searchRequest) { - if (!searchRequest.started) { - return searchRequest; - } - - return searchRequest.retry(); - })) - .catch(error => { - // If any errors occur after the search requests have resolved, then we kill Kibana. - fatalError(error, 'Courier fetch'); - }); - } - - function fetchSearchResults(searchRequests) { - function replaceAbortedRequests() { - searchRequests = searchRequests.map(searchRequest => { - if (searchRequest.aborted) { - return ABORTED; - } - - return searchRequest; - }); - } - - replaceAbortedRequests(); - return startRequests(searchRequests) - .then(function () { - replaceAbortedRequests(); - return callClient(searchRequests) - .catch(() => { - // Silently swallow errors that result from search requests so the consumer can surface - // them as notifications instead of courier forcing fatal errors. - }); - }) - .then(function (responses) { - replaceAbortedRequests(); - return callResponseHandlers(searchRequests, responses); - }) - .then(function (responses) { - replaceAbortedRequests(); - return continueIncomplete(searchRequests, responses, fetchSearchResults); - }) - .then(function (responses) { - replaceAbortedRequests(); - return responses.map(function (resp) { - switch (resp) { - case ABORTED: - return null; - case INCOMPLETE: - throw new Error( - i18n.translate('common.ui.courier.fetch.failedToClearRequestErrorMessage', { - defaultMessage: 'Failed to clear incomplete or duplicate request from responses.', - }) - ); - default: - return resp; - } - }); - }); - } - - function startRequests(searchRequests) { - return Promise.map(searchRequests, function (searchRequest) { - if (searchRequest === ABORTED) { - return searchRequest; - } - - return new Promise(function (resolve) { - const action = searchRequest.started ? searchRequest.continue : searchRequest.start; - resolve(action.call(searchRequest)); - }) - .catch(err => searchRequest.handleFailure(err)); - }); - } - - return fetchNow; -} diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.js b/src/legacy/ui/public/courier/fetch/fetch_soon.js index 266d4a6d3c9e6..ef02beddcb59a 100644 --- a/src/legacy/ui/public/courier/fetch/fetch_soon.js +++ b/src/legacy/ui/public/courier/fetch/fetch_soon.js @@ -17,41 +17,54 @@ * under the License. 
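Editor's note, illustration only and not part of the patch: the deleted provider above describes the batching idea that the new fetch_soon.js below implements, where calls made inside the same delay window share a single underlying fetch and each caller still receives only its own response. A minimal sketch of that pattern, with doFetch standing in for callClient:

// Module-scoped batch state, as in the new fetch_soon.js.
let queue = [];
let inProgress = null;

function enqueue(request, doFetch, ms = 50) {
  const index = queue.length;
  queue = [...queue, request];

  if (!inProgress) {
    inProgress = new Promise(resolve => {
      setTimeout(() => {
        const batch = queue;
        queue = [];
        inProgress = null;
        resolve(doFetch(batch)); // one fetch for every request queued in this window
      }, ms);
    });
  }

  // Each caller unwraps only the response at its own position in the batch.
  return inProgress.then(responses => responses[index]);
}

For example, enqueue({ q: 'a' }, batch => Promise.resolve(batch.map(r => r.q))) resolves to 'a' even if other requests join the same window.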
*/ -import _ from 'lodash'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchNowProvider } from './fetch_now'; +import { callClient } from './call_client'; /** - * This is usually the right fetch provider to use, rather than FetchNowProvider, as this class introduces - * a slight delay in the request process to allow multiple requests to queue up (e.g. when a dashboard - * is loading). + * This function introduces a slight delay in the request process to allow multiple requests to queue + * up (e.g. when a dashboard is loading). */ -export function FetchSoonProvider(Private, Promise, config) { - - const fetchNow = Private(FetchNowProvider); +export async function fetchSoon(request, options, { es, config, esShardTimeout }) { + const delay = config.get('courier:batchSearches') ? 50 : 0; + return delayedFetch(request, options, { es, config, esShardTimeout }, delay); +} - const fetch = () => fetchNow(searchRequestQueue.getPending()); - const debouncedFetch = _.debounce(fetch, { - wait: 10, - maxWait: 50 +/** + * Delays executing a function for a given amount of time, and returns a promise that resolves + * with the result. + * @param fn The function to invoke + * @param ms The number of milliseconds to wait + * @return Promise A promise that resolves with the result of executing the function + */ +function delay(fn, ms) { + return new Promise(resolve => { + setTimeout(() => resolve(fn()), ms); }); +} - /** - * Fetch a list of requests - * @param {array} requests - the requests to fetch - * @async - */ - this.fetchSearchRequests = (requests) => { - requests.forEach(req => req._setFetchRequested()); - config.get('courier:batchSearches') ? debouncedFetch() : fetch(); - return Promise.all(requests.map(req => req.getCompletePromise())); - }; +// The current batch/queue of requests to fetch +let requestsToFetch = []; +let requestOptions = []; - /** - * Return a promise that resembles the success of the fetch completing so we can execute - * logic based on this state change. Individual errors are routed to their respective requests. +// The in-progress fetch (if there is one) +let fetchInProgress = null; + +/** + * Delay fetching for a given amount of time, while batching up the requests to be fetched. + * Returns a promise that resolves with the response for the given request. + * @param request The request to fetch + * @param ms The number of milliseconds to wait (and batch requests) + * @return Promise The response for the given request */ - this.fetchQueued = () => { - return this.fetchSearchRequests(searchRequestQueue.getStartable()); - }; +async function delayedFetch(request, options, { es, config, esShardTimeout }, ms) { + const i = requestsToFetch.length; + requestsToFetch = [...requestsToFetch, request]; + requestOptions = [...requestOptions, options]; + const responses = await (fetchInProgress = fetchInProgress || delay(() => { + const response = callClient(requestsToFetch, requestOptions, { es, config, esShardTimeout }); + requestsToFetch = []; + requestOptions = []; + fetchInProgress = null; + return response; + }, ms)); + return responses[i]; } diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.test.js b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js new file mode 100644 index 0000000000000..824a4ab7e12e3 --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { fetchSoon } from './fetch_soon'; +import { callClient } from './call_client'; + +function getMockConfig(config) { + const entries = Object.entries(config); + return new Map(entries); +} + +const mockResponses = { + 'foo': {}, + 'bar': {}, + 'baz': {}, +}; + +jest.useFakeTimers(); + +jest.mock('./call_client', () => ({ + callClient: jest.fn(requests => { + // Allow a request object to specify which mockResponse it wants to receive (_mockResponseId) + // in addition to how long to simulate waiting before returning a response (_waitMs) + const responses = requests.map(request => { + const waitMs = requests.reduce((total, request) => request._waitMs || 0, 0); + return new Promise(resolve => { + resolve(mockResponses[request._mockResponseId]); + }, waitMs); + }); + return Promise.resolve(responses); + }) +})); + +describe('fetchSoon', () => { + beforeEach(() => { + callClient.mockClear(); + }); + + test('should delay by 0ms if config is set to not batch searches', () => { + const config = getMockConfig({ + 'courier:batchSearches': false + }); + const request = {}; + const options = {}; + + fetchSoon(request, options, { config }); + + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(0); + expect(callClient).toBeCalled(); + }); + + test('should delay by 50ms if config is set to batch searches', () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const request = {}; + const options = {}; + + fetchSoon(request, options, { config }); + + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(0); + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(50); + expect(callClient).toBeCalled(); + }); + + test('should send a batch of requests to callClient', () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const requests = [{ foo: 1 }, { foo: 2 }]; + const options = [{ bar: 1 }, { bar: 2 }]; + + requests.forEach((request, i) => { + fetchSoon(request, options[i], { config }); + }); + + jest.advanceTimersByTime(50); + expect(callClient).toBeCalledTimes(1); + expect(callClient.mock.calls[0][0]).toEqual(requests); + expect(callClient.mock.calls[0][1]).toEqual(options); + }); + + test('should return the response to the corresponding call for multiple batched requests', async () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const requests = [{ _mockResponseId: 'foo' }, { _mockResponseId: 'bar' }]; + + const promises = requests.map(request => { + return fetchSoon(request, {}, { config }); + }); + jest.advanceTimersByTime(50); + const results = await Promise.all(promises); + + expect(results).toEqual([mockResponses.foo, mockResponses.bar]); + }); + + test('should wait for the previous batch to start before starting a new batch', () => { + const config = getMockConfig({ + 
'courier:batchSearches': true + }); + const firstBatch = [{ foo: 1 }, { foo: 2 }]; + const secondBatch = [{ bar: 1 }, { bar: 2 }]; + + firstBatch.forEach(request => { + fetchSoon(request, {}, { config }); + }); + jest.advanceTimersByTime(50); + secondBatch.forEach(request => { + fetchSoon(request, {}, { config }); + }); + + expect(callClient).toBeCalledTimes(1); + expect(callClient.mock.calls[0][0]).toEqual(firstBatch); + + jest.advanceTimersByTime(50); + + expect(callClient).toBeCalledTimes(2); + expect(callClient.mock.calls[1][0]).toEqual(secondBatch); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.js b/src/legacy/ui/public/courier/fetch/get_search_params.js index 7561661d321fa..dd55201ba5540 100644 --- a/src/legacy/ui/public/courier/fetch/get_search_params.js +++ b/src/legacy/ui/public/courier/fetch/get_search_params.js @@ -17,6 +17,8 @@ * under the License. */ +const sessionId = Date.now(); + export function getMSearchParams(config) { return { rest_total_hits_as_int: true, @@ -25,13 +27,13 @@ export function getMSearchParams(config) { }; } -export function getSearchParams(config, sessionId, esShardTimeout) { +export function getSearchParams(config, esShardTimeout) { return { rest_total_hits_as_int: true, ignore_unavailable: true, ignore_throttled: getIgnoreThrottled(config), max_concurrent_shard_requests: getMaxConcurrentShardRequests(config), - preference: getPreference(config, sessionId), + preference: getPreference(config), timeout: getTimeout(esShardTimeout), }; } @@ -45,7 +47,7 @@ export function getMaxConcurrentShardRequests(config) { return maxConcurrentShardRequests > 0 ? maxConcurrentShardRequests : undefined; } -export function getPreference(config, sessionId) { +export function getPreference(config) { const setRequestPreference = config.get('courier:setRequestPreference'); if (setRequestPreference === 'sessionId') return sessionId; return setRequestPreference === 'custom' ? config.get('courier:customRequestPreference') : undefined; diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.test.js b/src/legacy/ui/public/courier/fetch/get_search_params.test.js index 9129aea05f428..380d1da963ddf 100644 --- a/src/legacy/ui/public/courier/fetch/get_search_params.test.js +++ b/src/legacy/ui/public/courier/fetch/get_search_params.test.js @@ -99,10 +99,10 @@ describe('getSearchParams', () => { test('includes timeout according to esShardTimeout if greater than 0', () => { const config = getConfigStub(); - let searchParams = getSearchParams(config, null, 0); + let searchParams = getSearchParams(config, 0); expect(searchParams.timeout).toBe(undefined); - searchParams = getSearchParams(config, null, 100); + searchParams = getSearchParams(config, 100); expect(searchParams.timeout).toBe('100ms'); }); }); diff --git a/src/legacy/ui/public/courier/fetch/handle_response.js b/src/legacy/ui/public/courier/fetch/handle_response.js new file mode 100644 index 0000000000000..fb2797369d78f --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/handle_response.js @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
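Editor's note, illustration only and not part of the patch: with sessionId now captured once at module load in get_search_params.js, every request issued from the same browser session reuses the same preference value. A rough usage sketch against a stubbed config object; the stub keys shown are only the ones visible in this diff, and any other settings read internally are assumed to fall back to undefined:

import { getSearchParams } from './get_search_params';

// Stubbed uiSettings-style config; unknown keys return undefined.
const settings = {
  'courier:setRequestPreference': 'sessionId',
  'courier:maxConcurrentShardRequests': 5,
};
const config = { get: key => settings[key] };

const params = getSearchParams(config, 30000);
// params.preference is the module-scoped Date.now() captured when the module loaded
// params.max_concurrent_shard_requests === 5
// params.timeout === '30000ms'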
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import React from 'react';
+import { toastNotifications } from '../../notify/toasts';
+import { i18n } from '@kbn/i18n';
+import { EuiSpacer } from '@elastic/eui';
+import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button';
+
+export function handleResponse(request, response) {
+  if (response.timed_out) {
+    toastNotifications.addWarning({
+      title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', {
+        defaultMessage: 'Data might be incomplete because your request timed out',
+      }),
+    });
+  }
+
+  if (response._shards && response._shards.failed) {
+    const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', {
+      defaultMessage: '{shardsFailed} of {shardsTotal} shards failed',
+      values: {
+        shardsFailed: response._shards.failed,
+        shardsTotal: response._shards.total,
+      },
+    });
+    const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', {
+      defaultMessage: 'The data you are seeing might be incomplete or wrong.',
+    });
+
+    const text = (
+      <>
+        {description}
+        <EuiSpacer size={'s'}/>
+        <ShardFailureOpenModalButton
+          request={request.body}
+          response={response}
+          title={title}
+        />
+      </>
+    );
+
+    toastNotifications.addWarning({
+      title,
+      text,
+    });
+  }
+
+  return response;
+}
diff --git a/src/legacy/ui/public/courier/fetch/handle_response.test.js b/src/legacy/ui/public/courier/fetch/handle_response.test.js
new file mode 100644
index 0000000000000..0836832e6c05a
--- /dev/null
+++ b/src/legacy/ui/public/courier/fetch/handle_response.test.js
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
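
// Aside: usage sketch for handleResponse above. A response reporting failed shards triggers a
// warning toast but is still returned unchanged to the caller. The response object here is a
// hand-rolled example, not real Elasticsearch output.
const partialResponse = {
  _shards: { total: 4, successful: 3, failed: 1 },
  hits: { hits: [] },
};
const returned = handleResponse({ body: {} }, partialResponse);
// returned === partialResponse; a "1 of 4 shards failed" warning was queued via toastNotifications
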
+ */ + +import { handleResponse } from './handle_response'; +import { toastNotifications } from '../../notify/toasts'; + +jest.mock('../../notify/toasts', () => { + return { + toastNotifications: { + addWarning: jest.fn() + } + }; +}); + +jest.mock('@kbn/i18n', () => { + return { + i18n: { + translate: (id, { defaultMessage }) => defaultMessage + } + }; +}); + +describe('handleResponse', () => { + beforeEach(() => { + toastNotifications.addWarning.mockReset(); + }); + + test('should notify if timed out', () => { + const request = { body: {} }; + const response = { + timed_out: true + }; + const result = handleResponse(request, response); + expect(result).toBe(response); + expect(toastNotifications.addWarning).toBeCalled(); + expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('request timed out'); + }); + + test('should notify if shards failed', () => { + const request = { body: {} }; + const response = { + _shards: { + failed: true + } + }; + const result = handleResponse(request, response); + expect(result).toBe(response); + expect(toastNotifications.addWarning).toBeCalled(); + expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('shards failed'); + }); + + test('returns the response', () => { + const request = {}; + const response = {}; + const result = handleResponse(request, response); + expect(result).toBe(response); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/index.js b/src/legacy/ui/public/courier/fetch/index.js index a5daaca5cb2c3..7b89dea1a110c 100644 --- a/src/legacy/ui/public/courier/fetch/index.js +++ b/src/legacy/ui/public/courier/fetch/index.js @@ -17,5 +17,5 @@ * under the License. */ -export { FetchSoonProvider } from './fetch_soon'; +export * from './fetch_soon'; export * from './get_search_params'; diff --git a/src/legacy/ui/public/courier/fetch/req_status.js b/src/legacy/ui/public/courier/fetch/req_status.js deleted file mode 100644 index d56bc6d3ad360..0000000000000 --- a/src/legacy/ui/public/courier/fetch/req_status.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export const RequestStatus = { - ABORTED: 'aborted', - INCOMPLETE: 'incomplete', -}; diff --git a/src/legacy/ui/public/courier/fetch/request/index.js b/src/legacy/ui/public/courier/fetch/request/index.js deleted file mode 100644 index 6647d0e5b2e10..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SearchRequestProvider } from './search_request'; diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js deleted file mode 100644 index ecac8cd474098..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import ngMock from 'ng_mock'; -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import { SearchRequestProvider } from '../search_request'; -import { searchRequestQueue } from '../../../../search_request_queue'; - -describe('ui/courier/fetch search request', () => { - beforeEach(ngMock.module('kibana')); - - afterEach(() => { - searchRequestQueue.removeAll(); - }); - - it('throws exception when created without errorHandler', ngMock.inject((Private) => { - const SearchReq = Private(SearchRequestProvider); - - let caughtError = false; - try { - new SearchReq({ source: {} }); - } catch(error) { - caughtError = true; - } - expect(caughtError).to.be(true); - })); - - describe('start', () => { - it('calls this.source.requestIsStarting(request)', ngMock.inject((Private) => { - const SearchReq = Private(SearchRequestProvider); - - const spy = sinon.spy(() => Promise.resolve()); - const source = { requestIsStarting: spy }; - - const req = new SearchReq({ source, errorHandler: () => {} }); - expect(req.start()).to.have.property('then').a('function'); - sinon.assert.calledOnce(spy); - sinon.assert.calledWithExactly(spy, req); - })); - }); - - describe('clone', () => { - it('returns a search request with identical constructor arguments', ngMock.inject((Private) => { - const SearchRequest = Private(SearchRequestProvider); - - const source = {}; - const errorHandler = () => {}; - const defer = {}; - - const originalRequest = new SearchRequest({ source, errorHandler, defer }); - const clonedRequest = originalRequest.clone(); - - expect(clonedRequest).not.to.be(originalRequest); - expect(clonedRequest.source).to.be(source); - expect(clonedRequest.errorHandler).to.be(errorHandler); - expect(clonedRequest.defer).to.be(defer); - })); - - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/index.js b/src/legacy/ui/public/courier/fetch/request/search_request/index.js deleted file mode 100644 index 6647d0e5b2e10..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SearchRequestProvider } from './search_request'; diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js deleted file mode 100644 index a6ce562e462d8..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import moment from 'moment'; - -import { searchRequestQueue } from '../../../search_request_queue'; - -import { createDefer } from 'ui/promises'; -import { i18n } from '@kbn/i18n'; - -export function SearchRequestProvider(Promise) { - class SearchRequest { - constructor({ source, defer, errorHandler }) { - if (!errorHandler) { - throw new Error( - i18n.translate('common.ui.courier.fetch.requireErrorHandlerErrorMessage', { - defaultMessage: '{errorHandler} is required', - values: { errorHandler: 'errorHandler' } - }) - ); - } - - this.errorHandler = errorHandler; - this.source = source; - this.defer = defer || createDefer(Promise); - this.abortedDefer = createDefer(Promise); - this.type = 'search'; - - // Track execution time. - this.moment = undefined; - this.ms = undefined; - - // Lifecycle state. - this.started = false; - this.stopped = false; - this._isFetchRequested = false; - - searchRequestQueue.add(this); - } - - /** - * Called by the searchPoll to find requests that should be sent to the - * fetchSoon module. When a module is sent to fetchSoon its _isFetchRequested flag - * is set, and this consults that flag so requests are not send to fetchSoon - * multiple times. - * - * @return {Boolean} - */ - canStart() { - if (this.source._fetchDisabled) { - return false; - } - - if (this.stopped) { - return false; - } - - if (this._isFetchRequested) { - return false; - } - - return true; - } - - /** - * Used to find requests that were previously sent to the fetchSoon module but - * have not been started yet, so they can be started. - * - * @return {Boolean} - */ - isFetchRequestedAndPending() { - if (this.started) { - return false; - } - - return this._isFetchRequested; - } - - /** - * Called by the fetchSoon module when this request has been sent to - * be fetched. At that point the request is somewhere between `ready-to-start` - * and `started`. 
The fetch module then waits a short period of time to - * allow requests to build up in the request queue, and then immediately - * fetches all requests that return true from `isFetchRequestedAndPending()` - * - * @return {undefined} - */ - _setFetchRequested() { - this._isFetchRequested = true; - } - - start() { - if (this.started) { - throw new TypeError( - i18n.translate('common.ui.courier.fetch.unableStartRequestErrorMessage', { - defaultMessage: 'Unable to start request because it has already started', - }) - ); - } - - this.started = true; - this.moment = moment(); - - return this.source.requestIsStarting(this); - } - - getFetchParams() { - return this.source._flatten(); - } - - filterError() { - return false; - } - - handleResponse(resp) { - this.success = true; - this.resp = resp; - } - - handleFailure(error) { - this.success = false; - this.resp = error; - this.resp = (error && error.resp) || error; - return this.errorHandler(this, error); - } - - isIncomplete() { - return false; - } - - continue() { - throw new Error( - i18n.translate('common.ui.courier.fetch.unableContinueRequestErrorMessage', { - defaultMessage: 'Unable to continue {type} request', - values: { type: this.type } - }) - ); - } - - retry() { - const clone = this.clone(); - this.abort(); - return clone; - } - - _markStopped() { - if (this.stopped) return; - this.stopped = true; - this.source.requestIsStopped(this); - searchRequestQueue.remove(this); - } - - abort() { - this._markStopped(); - this.aborted = true; - const error = new Error('The request was aborted.'); - error.name = 'AbortError'; - this.abortedDefer.resolve(error); - this.abortedDefer = null; - this.defer.reject(error); - this.defer = null; - } - - whenAborted(cb) { - this.abortedDefer.promise.then(cb); - } - - complete() { - this._markStopped(); - this.ms = this.moment.diff() * -1; - this.defer.resolve(this.resp); - } - - getCompletePromise() { - return this.defer.promise; - } - - getCompleteOrAbortedPromise() { - return Promise.race([ this.defer.promise, this.abortedDefer.promise ]); - } - - clone = () => { - const { source, defer, errorHandler } = this; - return new SearchRequest({ source, defer, errorHandler }); - }; - } - - return SearchRequest; -} diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js deleted file mode 100644 index 807d53086e106..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export { SerializeFetchParamsProvider } from './serialize_fetch_params_provider'; diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js deleted file mode 100644 index eed28d0a05b90..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { getPreference, getTimeout } from '../../get_search_params'; - -/** - * - * @param requestsFetchParams {Array.} - * @param Promise - * @param sessionId - * @return {Promise.} - */ -export function serializeFetchParams( - requestsFetchParams, - Promise, - sessionId, - config, - esShardTimeout) { - const promises = requestsFetchParams.map(function (fetchParams) { - return Promise.resolve(fetchParams.index) - .then(function (indexPattern) { - const body = { - timeout: getTimeout(esShardTimeout), - ...fetchParams.body || {}, - }; - - const index = (indexPattern && indexPattern.title) ? indexPattern.title : indexPattern; - - const header = { - index, - search_type: fetchParams.search_type, - ignore_unavailable: true, - preference: getPreference(config, sessionId) - }; - - return `${JSON.stringify(header)}\n${JSON.stringify(body)}`; - }); - }); - - return Promise.all(promises).then(function (requests) { - return requests.join('\n') + '\n'; - }); -} - diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js deleted file mode 100644 index 5f4c5bf9ef45a..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { serializeFetchParams } from './serialize_fetch_params'; -import _ from 'lodash'; - -const DEFAULT_SESSION_ID = '1'; - -function serializeFetchParamsWithDefaults(paramOverrides) { - const paramDefaults = { - requestFetchParams: [], - Promise, - sessionId: DEFAULT_SESSION_ID, - config: { - get: () => { - return 'sessionId'; - } - }, - timeout: 100, - }; - const params = { ...paramDefaults, ...paramOverrides }; - - return serializeFetchParams( - params.requestFetchParams, - Promise, - params.sessionId, - params.config, - params.timeout, - ); -} - -describe('when indexList is not empty', () => { - test('includes the index', () => { - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar', $foo: 'bar' } - } - ]; - return serializeFetchParamsWithDefaults({ requestFetchParams }).then(value => { - expect(_.includes(value, '"index":["logstash-123"]')).toBe(true); - }); - }); -}); - -describe('headers', () => { - - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar' } - } - ]; - - const getHeader = async (paramOverrides) => { - const request = await serializeFetchParamsWithDefaults(paramOverrides); - const requestParts = request.split('\n'); - if (requestParts.length < 2) { - throw new Error('fetch Body does not contain expected format header newline body.'); - } - return JSON.parse(requestParts[0]); - }; - - describe('search request preference', () => { - test('should be set to sessionId when courier:setRequestPreference is "sessionId"', async () => { - const config = { - get: () => { - return 'sessionId'; - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(DEFAULT_SESSION_ID); - }); - - test('should be set to custom string when courier:setRequestPreference is "custom"', async () => { - const CUSTOM_PREFERENCE = '_local'; - const config = { - get: (key) => { - if (key === 'courier:setRequestPreference') { - return 'custom'; - } else if (key === 'courier:customRequestPreference') { - return CUSTOM_PREFERENCE; - } - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(CUSTOM_PREFERENCE); - }); - - test('should not be set when courier:setRequestPreference is "none"', async () => { - const config = { - get: () => { - return 'none'; - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(undefined); - }); - }); -}); - -describe('body', () => { - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar' } - } - ]; - - const getBody = async (paramOverrides) => { - const request = await serializeFetchParamsWithDefaults(paramOverrides); - const requestParts = request.split('\n'); - if (requestParts.length < 2) { - throw new Error('fetch Body does not contain expected format: header newline body.'); - } - return JSON.parse(requestParts[1]); - }; - - describe('timeout', () => { - test('should set a timeout as specified', async () => { - const request = await getBody({ requestFetchParams, timeout: 200 }); - expect(request).toHaveProperty('timeout', '200ms'); - }); - - test('should not set a timeout when timeout is 0', async () => { - const request = await getBody({ requestFetchParams, timeout: 0 }); - expect(request.timeout).toBe(undefined); - }); - }); -}); diff --git 
a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js deleted file mode 100644 index 4ddcc05b927ff..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { serializeFetchParams } from './serialize_fetch_params'; - -export function SerializeFetchParamsProvider(Promise, sessionId, config, esShardTimeout) { - return (fetchParams) => ( - serializeFetchParams( - fetchParams, - Promise, - sessionId, - config, - esShardTimeout) - ); -} diff --git a/src/legacy/ui/public/courier/index.js b/src/legacy/ui/public/courier/index.js index 01ef07df26670..5647af3d0d645 100644 --- a/src/legacy/ui/public/courier/index.js +++ b/src/legacy/ui/public/courier/index.js @@ -17,9 +17,7 @@ * under the License. */ -import './courier'; - -export { SearchSourceProvider } from './search_source'; +export { SearchSource } from './search_source'; export { addSearchStrategy, diff --git a/src/legacy/ui/public/courier/search_poll/search_poll.js b/src/legacy/ui/public/courier/search_poll/search_poll.js index 91c866c14aa49..f00c2a32e0ec6 100644 --- a/src/legacy/ui/public/courier/search_poll/search_poll.js +++ b/src/legacy/ui/public/courier/search_poll/search_poll.js @@ -19,98 +19,50 @@ import _ from 'lodash'; -import { fatalError } from '../../notify'; -import '../../promises'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchSoonProvider } from '../fetch'; import { timefilter } from 'ui/timefilter'; -export function SearchPollProvider(Private, Promise) { - const fetchSoon = Private(FetchSoonProvider); - - class SearchPoll { - constructor() { - this._isPolling = false; - this._intervalInMs = undefined; - this._timerId = null; - this._searchPromise = null; - this._isIntervalFasterThanSearch = false; - } - - setIntervalInMs = intervalInMs => { - this._intervalInMs = _.parseInt(intervalInMs); - }; - - resume = () => { - this._isPolling = true; - this.resetTimer(); - }; - - pause = () => { - this._isPolling = false; - this.clearTimer(); - }; - - resetTimer = () => { - // Cancel the pending search and schedule a new one. - this.clearTimer(); - - if (this._isPolling) { - this._timerId = setTimeout(this._search, this._intervalInMs); - } - }; +export class SearchPoll { + constructor() { + this._isPolling = false; + this._intervalInMs = undefined; + this._timerId = null; + } - clearTimer = () => { - // Cancel the pending search, if there is one. 
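
// Aside: usage sketch for the simplified SearchPoll class above. Polling now only nudges the
// timefilter on an interval instead of driving a request queue itself. The import path is an
// assumption based on this file's location.
import { SearchPoll } from 'ui/courier/search_poll/search_poll';

const poll = new SearchPoll();
poll.setIntervalInMs(10000); // refresh every 10 seconds
poll.resume();               // starts the timer; each tick calls timefilter.notifyShouldFetch()
// ...later, e.g. when auto-refresh is switched off:
poll.pause();                // clears any pending timer and stops polling
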
- if (this._timerId) { - clearTimeout(this._timerId); - this._timerId = null; - } - }; + setIntervalInMs = intervalInMs => { + this._intervalInMs = _.parseInt(intervalInMs); + }; - _search = () => { - // If our interval is faster than the rate at which searches return results, then trigger - // a new search as soon as the results come back. - if (this._searchPromise) { - this._isIntervalFasterThanSearch = true; - return; - } + resume = () => { + this._isPolling = true; + this.resetTimer(); + }; - // Schedule another search. - this.resetTimer(); + pause = () => { + this._isPolling = false; + this.clearTimer(); + }; - // We use resolve() here instead of try() because the latter won't trigger a $digest - // when the promise resolves. - this._searchPromise = Promise.resolve().then(() => { - timefilter.notifyShouldFetch(); - const requests = searchRequestQueue.getInactive(); + resetTimer = () => { + // Cancel the pending search and schedule a new one. + this.clearTimer(); - // The promise returned from fetchSearchRequests() only resolves when the requests complete. - // We want to continue even if the requests abort so we return a different promise. - fetchSoon.fetchSearchRequests(requests); + if (this._isPolling) { + this._timerId = setTimeout(this._search, this._intervalInMs); + } + }; - return Promise.all( - requests.map(request => request.getCompleteOrAbortedPromise()) - ); - }) - .then(() => { - this._searchPromise = null; + clearTimer = () => { + // Cancel the pending search, if there is one. + if (this._timerId) { + clearTimeout(this._timerId); + this._timerId = null; + } + }; - // If the search response comes back before the interval fires, then we'll wait - // for the interval and let it kick off the next search. But if the interval fires before - // the search returns results, then we'll need to wait for the search to return results - // and then kick off another search again. A new search will also reset the interval. - if (this._isIntervalFasterThanSearch) { - this._isIntervalFasterThanSearch = false; - this._search(); - } - }) - .catch(err => { - // If there was a problem, then kill Kibana. - fatalError(err); - }); - }; - } + _search = () => { + // Schedule another search. + this.resetTimer(); - return new SearchPoll(); + timefilter.notifyShouldFetch(); + }; } diff --git a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js deleted file mode 100644 index f6b4e4bef20c2..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import ngMock from 'ng_mock'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -import { searchRequestQueue } from '../search_request_queue'; - -describe('Courier Request Queue', function () { - beforeEach(ngMock.module('kibana')); - beforeEach(() => searchRequestQueue.removeAll()); - after(() => searchRequestQueue.removeAll()); - - class MockReq { - constructor(startable = true) { - this.source = {}; - this.canStart = sinon.stub().returns(startable); - } - } - - describe('#getStartable()', function () { - it('returns only startable requests', function () { - searchRequestQueue.add(new MockReq(false)); - searchRequestQueue.add(new MockReq(true)); - expect(searchRequestQueue.getStartable()).to.have.length(1); - }); - }); - - // Note: I'm not convinced this discrepancy between how we calculate startable vs inactive requests makes any sense. - // I'm only testing here that the current, (very old) code continues to behave how it always did, but it may turn out - // that we can clean this up, or remove this. - describe('#getInactive()', function () { - it('returns only requests with started = false', function () { - searchRequestQueue.add({ started: true }); - searchRequestQueue.add({ started: false }); - searchRequestQueue.add({ started: true }); - expect(searchRequestQueue.getInactive()).to.have.length(1); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/search_request_queue/index.js b/src/legacy/ui/public/courier/search_request_queue/index.js deleted file mode 100644 index 785a59fce73d5..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { searchRequestQueue } from './search_request_queue'; diff --git a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js deleted file mode 100644 index 80d74cdad94fe..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -class SearchRequestQueue { - constructor() { - // Queue of pending requests, requests are removed as they are processed by fetch.[sourceType](). - this._searchRequests = []; - } - - getCount() { - return this._searchRequests.length; - } - - add(searchRequest) { - this._searchRequests.push(searchRequest); - } - - remove(searchRequest) { - // Remove all matching search requests. - this._searchRequests = this._searchRequests.filter( - existingSearchRequest => existingSearchRequest !== searchRequest - ); - } - - removeAll() { - this._searchRequests.length = 0; - } - - abortAll() { - this._searchRequests.forEach(searchRequest => searchRequest.abort()); - } - - getAll() { - return this._searchRequests; - } - - getSearchRequestAt(index) { - return this._searchRequests[index]; - } - - getInactive() { - return this._searchRequests.filter(searchRequest => !searchRequest.started); - } - - getStartable() { - return this._searchRequests.filter(searchRequest => searchRequest.canStart()); - } - - getPending() { - return this._searchRequests.filter(searchRequest => searchRequest.isFetchRequestedAndPending()); - } -} - -export const searchRequestQueue = new SearchRequestQueue(); diff --git a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js index ca3d21a330ce1..279e389dec114 100644 --- a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js +++ b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js @@ -20,18 +20,17 @@ import '../../../private'; import ngMock from 'ng_mock'; import expect from '@kbn/expect'; -import { NormalizeSortRequestProvider } from '../_normalize_sort_request'; +import { normalizeSortRequest } from '../_normalize_sort_request'; import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; import _ from 'lodash'; describe('SearchSource#normalizeSortRequest', function () { - let normalizeSortRequest; let indexPattern; let normalizedSort; + const defaultSortOptions = { unmapped_type: 'boolean' }; beforeEach(ngMock.module('kibana')); beforeEach(ngMock.inject(function (Private) { - normalizeSortRequest = Private(NormalizeSortRequestProvider); indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); normalizedSort = [{ @@ -44,7 +43,7 @@ describe('SearchSource#normalizeSortRequest', function () { it('should return an array', function () { const sortable = { someField: 'desc' }; - const result = normalizeSortRequest(sortable, indexPattern); + const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions); expect(result).to.be.an(Array); expect(result).to.eql(normalizedSort); // ensure object passed in is not mutated @@ -53,7 +52,7 @@ describe('SearchSource#normalizeSortRequest', function () { }); it('should make plain string sort into the more verbose format', function () { - const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern); + const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern, 
defaultSortOptions); expect(result).to.eql(normalizedSort); }); @@ -64,7 +63,7 @@ describe('SearchSource#normalizeSortRequest', function () { unmapped_type: 'boolean' } }]; - const result = normalizeSortRequest(sortState, indexPattern); + const result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions); expect(result).to.eql(normalizedSort); }); @@ -86,11 +85,11 @@ describe('SearchSource#normalizeSortRequest', function () { } }; - let result = normalizeSortRequest(sortState, indexPattern); + let result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); sortState[fieldName] = { order: direction }; - result = normalizeSortRequest([sortState], indexPattern); + result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); }); @@ -105,7 +104,7 @@ describe('SearchSource#normalizeSortRequest', function () { order: direction, unmapped_type: 'boolean' }; - const result = normalizeSortRequest([sortState], indexPattern); + const result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); }); @@ -118,7 +117,7 @@ describe('SearchSource#normalizeSortRequest', function () { } }]; - const result = normalizeSortRequest(sortable, indexPattern); + const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions); expect(_.isEqual(result, expected)).to.be.ok(); }); diff --git a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js b/src/legacy/ui/public/courier/search_source/__tests__/search_source.js deleted file mode 100644 index ccb3c55b7a381..0000000000000 --- a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import ngMock from 'ng_mock'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -import { searchRequestQueue } from '../../search_request_queue'; -import { SearchSourceProvider } from '../search_source'; -import StubIndexPattern from 'test_utils/stub_index_pattern'; - -function timeout() { - return new Promise(resolve => { - setTimeout(resolve); - }); -} - -describe('SearchSource', function () { - require('test_utils/no_digest_promises').activateForSuite(); - - let config; - let SearchSource; - let indexPattern; - let indexPattern2; - - beforeEach(ngMock.module('kibana')); - beforeEach(ngMock.inject(function (Private, _config_) { - config = _config_; - SearchSource = Private(SearchSourceProvider); - - indexPattern = new StubIndexPattern('test-*', cfg => cfg, null, []); - indexPattern2 = new StubIndexPattern('test2-*', cfg => cfg, null, []); - expect(indexPattern).to.not.be(indexPattern2); - })); - beforeEach(() => searchRequestQueue.removeAll()); - after(() => searchRequestQueue.removeAll()); - - describe('#onResults()', function () { - it('adds a request to the searchRequestQueue', function () { - const searchSource = new SearchSource(); - - expect(searchRequestQueue.getCount()).to.be(0); - searchSource.onResults(); - expect(searchRequestQueue.getCount()).to.be(1); - }); - - it('returns a promise that is resolved with the results', function () { - const searchSource = new SearchSource(); - const fakeResults = {}; - - const promise = searchSource.onResults().then((results) => { - expect(results).to.be(fakeResults); - }); - - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - searchRequest.defer.resolve(fakeResults); - return promise; - }); - }); - - describe('#destroy()', function () { - it('aborts all startable requests', function () { - const searchSource = new SearchSource(); - searchSource.onResults(); - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - sinon.stub(searchRequest, 'canStart').returns(true); - searchSource.destroy(); - expect(searchRequestQueue.getCount()).to.be(0); - }); - - it('aborts all non-startable requests', function () { - const searchSource = new SearchSource(); - searchSource.onResults(); - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - sinon.stub(searchRequest, 'canStart').returns(false); - searchSource.destroy(); - expect(searchRequestQueue.getCount()).to.be(0); - }); - }); - - describe('#setField()', function () { - it('sets the value for the property', function () { - const searchSource = new SearchSource(); - searchSource.setField('aggs', 5); - expect(searchSource.getField('aggs')).to.be(5); - }); - - it('throws an error if the property is not accepted', function () { - const searchSource = new SearchSource(); - expect(() => searchSource.setField('index', 5)).to.throwError(); - }); - }); - - describe('#getField()', function () { - it('gets the value for the property', function () { - const searchSource = new SearchSource(); - searchSource.setField('aggs', 5); - expect(searchSource.getField('aggs')).to.be(5); - }); - - it('throws an error if the property is not accepted', function () { - const searchSource = new SearchSource(); - expect(() => searchSource.getField('unacceptablePropName')).to.throwError(); - }); - }); - - describe(`#setField('index')`, function () { - describe('auto-sourceFiltering', function () { - describe('new index pattern assigned', function () { - it('generates a searchSource filter', function () { - const searchSource = new SearchSource(); - 
expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - searchSource.setField('index', indexPattern); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be.a('function'); - }); - - it('removes created searchSource filter on removal', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - }); - }); - - describe('new index pattern assigned over another', function () { - it('replaces searchSource filter with new', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - const searchSourceFilter1 = searchSource.getField('source'); - searchSource.setField('index', indexPattern2); - expect(searchSource.getField('index')).to.be(indexPattern2); - expect(searchSource.getField('source')).to.be.a('function'); - expect(searchSource.getField('source')).to.not.be(searchSourceFilter1); - }); - - it('removes created searchSource filter on removal', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - searchSource.setField('index', indexPattern2); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - }); - }); - - describe('ip assigned before custom searchSource filter', function () { - it('custom searchSource filter becomes new searchSource', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('index', indexPattern); - expect(searchSource.getField('source')).to.be.a('function'); - searchSource.setField('source', football); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be(football); - }); - - it('custom searchSource stays after removal', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('index', indexPattern); - searchSource.setField('source', football); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(football); - }); - }); - - describe('ip assigned after custom searchSource filter', function () { - it('leaves the custom filter in place', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('source', football); - searchSource.setField('index', indexPattern); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be(football); - }); - - it('custom searchSource stays after removal', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('source', football); - searchSource.setField('index', indexPattern); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(football); - }); - }); - }); - }); - - describe('#onRequestStart()', () => { - it('should be called when starting a request', async () => { - const searchSource = new SearchSource(); - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const request = {}; - searchSource.requestIsStarting(request); - await 
timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - }); - - it('should not be called on parent searchSource', async () => { - const parent = new SearchSource(); - const searchSource = new SearchSource().setParent(parent); - - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const parentFn = sinon.spy(); - parent.onRequestStart(parentFn); - const request = {}; - searchSource.requestIsStarting(request); - await timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - expect(parentFn.notCalled).to.be(true); - }); - - it('should be called on parent searchSource if callParentStartHandlers is true', async () => { - const parent = new SearchSource(); - const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true }); - - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const parentFn = sinon.spy(); - parent.onRequestStart(parentFn); - const request = {}; - searchSource.requestIsStarting(request); - await timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - expect(parentFn.calledWith(searchSource, request)).to.be(true); - }); - }); - - describe('#_mergeProp', function () { - describe('filter', function () { - let searchSource; - let state; - - beforeEach(function () { - searchSource = new SearchSource(); - state = {}; - }); - - [null, undefined].forEach(falsyValue => { - it(`ignores ${falsyValue} filter`, function () { - searchSource._mergeProp(state, falsyValue, 'filter'); - expect(state.filters).to.be(undefined); - }); - }); - - [false, 0, '', NaN].forEach(falsyValue => { - it(`doesn't add ${falsyValue} filter`, function () { - searchSource._mergeProp(state, falsyValue, 'filter'); - expect(state.filters).to.be.empty(); - }); - }); - - it('adds "meta.disabled: undefined" filter', function () { - const filter = { - meta: {} - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([filter]); - }); - - it('adds "meta.disabled: false" filter', function () { - const filter = { - meta: { - disabled: false - } - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([filter]); - }); - - it(`doesn't add "meta.disabled: true" filter`, function () { - const filter = { - meta: { - disabled: true - } - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.be.empty(); - }); - - describe('when courier:ignoreFilterIfFieldNotInIndex is false', function () { - it('adds filter for non-existent field', function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', false); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([ filter ]); - }); - }); - - describe('when courier:ignoreFilterIfFieldNotInIndex is true', function () { - it(`doesn't add filter for non-existent field`, function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', true); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.be.empty(); - }); - - it(`adds filter for existent field`, function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', true); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [{ name: 'bar' }] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([ filter ]); - }); - }); - }); - }); -}); diff --git 
a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js index 2b5025f14fef7..3e5d7a1374115 100644 --- a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js +++ b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js @@ -19,59 +19,55 @@ import _ from 'lodash'; -export function NormalizeSortRequestProvider(config) { - const defaultSortOptions = config.get('sort:options'); - - /** +/** * Decorate queries with default parameters * @param {query} query object * @returns {object} */ - return function (sortObject, indexPattern) { - // [].concat({}) -> [{}], [].concat([{}]) -> [{}] - return [].concat(sortObject).map(function (sortable) { - return normalize(sortable, indexPattern); - }); - }; +export function normalizeSortRequest(sortObject, indexPattern, defaultSortOptions) { + // [].concat({}) -> [{}], [].concat([{}]) -> [{}] + return [].concat(sortObject).map(function (sortable) { + return normalize(sortable, indexPattern, defaultSortOptions); + }); +} - /* +/* Normalize the sort description to the more verbose format: { someField: "desc" } into { someField: { "order": "desc"}} */ - function normalize(sortable, indexPattern) { - const normalized = {}; - let sortField = _.keys(sortable)[0]; - let sortValue = sortable[sortField]; - const indexField = indexPattern.fields.getByName(sortField); +function normalize(sortable, indexPattern, defaultSortOptions) { + const normalized = {}; + let sortField = _.keys(sortable)[0]; + let sortValue = sortable[sortField]; + const indexField = indexPattern.fields.getByName(sortField); - if (indexField && indexField.scripted && indexField.sortable) { - let direction; - if (_.isString(sortValue)) direction = sortValue; - if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order; + if (indexField && indexField.scripted && indexField.sortable) { + let direction; + if (_.isString(sortValue)) direction = sortValue; + if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order; - sortField = '_script'; - sortValue = { - script: { - source: indexField.script, - lang: indexField.lang - }, - type: castSortType(indexField.type), - order: direction - }; - } else { - if (_.isString(sortValue)) { - sortValue = { order: sortValue }; - } - sortValue = _.defaults({}, sortValue, defaultSortOptions); - - if (sortField === '_score') { - delete sortValue.unmapped_type; - } + sortField = '_script'; + sortValue = { + script: { + source: indexField.script, + lang: indexField.lang + }, + type: castSortType(indexField.type), + order: direction + }; + } else { + if (_.isString(sortValue)) { + sortValue = { order: sortValue }; } + sortValue = _.defaults({}, sortValue, defaultSortOptions); - normalized[sortField] = sortValue; - return normalized; + if (sortField === '_score') { + delete sortValue.unmapped_type; + } } + + normalized[sortField] = sortValue; + return normalized; } // The ES API only supports sort scripts of type 'number' and 'string' diff --git a/src/legacy/ui/public/courier/search_source/index.js b/src/legacy/ui/public/courier/search_source/index.js index 5ec7cc315db1c..dcae7b3d2ff05 100644 --- a/src/legacy/ui/public/courier/search_source/index.js +++ b/src/legacy/ui/public/courier/search_source/index.js @@ -17,4 +17,4 @@ * under the License. 
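
// Aside: a worked example for normalizeSortRequest above, now that callers pass defaultSortOptions
// explicitly (previously read from config inside the provider). The indexPattern stub is
// hypothetical and has no matching scripted field; the output shape follows the updated tests.
const indexPatternStub = { fields: { getByName: () => undefined } };

normalizeSortRequest({ someField: 'desc' }, indexPatternStub, { unmapped_type: 'boolean' });
// -> [{ someField: { order: 'desc', unmapped_type: 'boolean' } }]
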
*/ -export { SearchSourceProvider } from './search_source'; +export { SearchSource } from './search_source'; diff --git a/src/legacy/ui/public/courier/search_source/mocks.ts b/src/legacy/ui/public/courier/search_source/mocks.ts new file mode 100644 index 0000000000000..bf546c1b9e7c2 --- /dev/null +++ b/src/legacy/ui/public/courier/search_source/mocks.ts @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"), you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export const searchSourceMock = { + setPreferredSearchStrategyId: jest.fn(), + getPreferredSearchStrategyId: jest.fn(), + setFields: jest.fn(), + setField: jest.fn(), + getId: jest.fn(), + getFields: jest.fn(), + getField: jest.fn(), + getOwnField: jest.fn(), + create: jest.fn(), + createCopy: jest.fn(), + createChild: jest.fn(), + setParent: jest.fn(), + getParent: jest.fn(), + fetch: jest.fn(), + onRequestStart: jest.fn(), + getSearchRequestBody: jest.fn(), + destroy: jest.fn(), + history: [], +}; diff --git a/src/legacy/ui/public/courier/search_source/search_source.d.ts b/src/legacy/ui/public/courier/search_source/search_source.d.ts index 11406ff3da824..674e7ace0594c 100644 --- a/src/legacy/ui/public/courier/search_source/search_source.d.ts +++ b/src/legacy/ui/public/courier/search_source/search_source.d.ts @@ -17,4 +17,23 @@ * under the License. 
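
// Aside: how a Jest test might consume the searchSourceMock exported above. The import path and
// the applyTimeRange helper under test are hypothetical.
import { searchSourceMock } from 'ui/courier/search_source/mocks';

function applyTimeRange(searchSource, range) {
  searchSource.setField('filter', { range });
  return searchSource.fetch();
}

test('sets a filter and fetches', async () => {
  searchSourceMock.fetch.mockResolvedValue({ hits: { hits: [] } });
  await applyTimeRange(searchSourceMock, { gte: 'now-15m', lte: 'now' });
  expect(searchSourceMock.setField).toHaveBeenCalledWith('filter', { range: { gte: 'now-15m', lte: 'now' } });
});
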
*/ -export type SearchSource = any; +export declare class SearchSource { + setPreferredSearchStrategyId: (searchStrategyId: string) => void; + getPreferredSearchStrategyId: () => string; + setFields: (newFields: any) => SearchSource; + setField: (field: string, value: any) => SearchSource; + getId: () => string; + getFields: () => any; + getField: (field: string) => any; + getOwnField: () => any; + create: () => SearchSource; + createCopy: () => SearchSource; + createChild: (options?: any) => SearchSource; + setParent: (parent: SearchSource | boolean) => SearchSource; + getParent: () => SearchSource | undefined; + fetch: (options?: any) => Promise; + onRequestStart: (handler: (searchSource: SearchSource, options: any) => void) => void; + getSearchRequestBody: () => any; + destroy: () => void; + history: any[]; +} diff --git a/src/legacy/ui/public/courier/search_source/search_source.js b/src/legacy/ui/public/courier/search_source/search_source.js index afa42a7d7c015..ed8d15d61c1db 100644 --- a/src/legacy/ui/public/courier/search_source/search_source.js +++ b/src/legacy/ui/public/courier/search_source/search_source.js @@ -71,16 +71,16 @@ import _ from 'lodash'; import angular from 'angular'; -import { buildEsQuery, getEsQueryConfig, filterMatchesIndex } from '@kbn/es-query'; +import { buildEsQuery, getEsQueryConfig } from '@kbn/es-query'; -import { createDefer } from 'ui/promises'; -import { NormalizeSortRequestProvider } from './_normalize_sort_request'; -import { SearchRequestProvider } from '../fetch/request'; +import { normalizeSortRequest } from './_normalize_sort_request'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchSoonProvider } from '../fetch'; -import { FieldWildcardProvider } from '../../field_wildcard'; +import { fetchSoon } from '../fetch'; +import { fieldWildcardFilter } from '../../field_wildcard'; import { getHighlightRequest } from '../../../../../plugins/data/common/field_formats'; +import { npSetup } from 'ui/new_platform'; +import chrome from '../../chrome'; +import { RequestFailure } from '../fetch/errors'; import { filterDocvalueFields } from './filter_docvalue_fields'; const FIELDS = [ @@ -114,327 +114,242 @@ function isIndexPattern(val) { return Boolean(val && typeof val.title === 'string'); } -export function SearchSourceProvider(Promise, Private, config) { - const SearchRequest = Private(SearchRequestProvider); - const normalizeSortRequest = Private(NormalizeSortRequestProvider); - const fetchSoon = Private(FetchSoonProvider); - const { fieldWildcardFilter } = Private(FieldWildcardProvider); - const getConfig = (...args) => config.get(...args); +const esShardTimeout = npSetup.core.injectedMetadata.getInjectedVar('esShardTimeout'); +const config = npSetup.core.uiSettings; +const getConfig = (...args) => config.get(...args); +const forIp = Symbol('for which index pattern?'); - const forIp = Symbol('for which index pattern?'); +export class SearchSource { + constructor(initialFields) { + this._id = _.uniqueId('data_source'); - class SearchSource { - constructor(initialFields) { - this._id = _.uniqueId('data_source'); + this._searchStrategyId = undefined; + this._fields = parseInitialFields(initialFields); + this._parent = undefined; - this._searchStrategyId = undefined; - this._fields = parseInitialFields(initialFields); - this._parent = undefined; - - this.history = []; - this._requestStartHandlers = []; - this._inheritOptions = {}; - - this._filterPredicates = [ - (filter) => { - // remove null/undefined filters - return filter; - }, 
- (filter) => { - const disabled = _.get(filter, 'meta.disabled'); - return disabled === undefined || disabled === false; - }, - (filter, data) => { - const index = data.index || this.getField('index'); - return !config.get('courier:ignoreFilterIfFieldNotInIndex') || filterMatchesIndex(filter, index); - } - ]; - } + this.history = []; + this._requestStartHandlers = []; + this._inheritOptions = {}; + } - /***** + /***** * PUBLIC API *****/ - setPreferredSearchStrategyId(searchStrategyId) { - this._searchStrategyId = searchStrategyId; - } - - getPreferredSearchStrategyId() { - return this._searchStrategyId; - } - - setFields(newFields) { - this._fields = newFields; - return this; - } - - setField = (field, value) => { - if (!FIELDS.includes(field)) { - throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); - } + setPreferredSearchStrategyId(searchStrategyId) { + this._searchStrategyId = searchStrategyId; + } - if (field === 'index') { - const fields = this._fields; + getPreferredSearchStrategyId() { + return this._searchStrategyId; + } - const hasSource = fields.source; - const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp); - const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index; - if (sourceIsForOurIp) { - delete fields.source; - } + setFields(newFields) { + this._fields = newFields; + return this; + } - if (value === null || value === undefined) { - delete fields.index; - return this; - } + setField(field, value) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); + } - if (!isIndexPattern(value)) { - throw new TypeError('expected indexPattern to be an IndexPattern duck.'); - } + if (field === 'index') { + const fields = this._fields; - fields[field] = value; - if (!fields.source) { - // imply source filtering based on the index pattern, but allow overriding - // it by simply setting another field for "source". When index is changed - fields.source = function () { - return value.getSourceFiltering(); - }; - fields.source[forIp] = value; - } + const hasSource = fields.source; + const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp); + const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index; + if (sourceIsForOurIp) { + delete fields.source; + } + if (value === null || value === undefined) { + delete fields.index; return this; } - if (value == null) { - delete this._fields[field]; - return this; + if (!isIndexPattern(value)) { + throw new TypeError('expected indexPattern to be an IndexPattern duck.'); } - this._fields[field] = value; - return this; - }; + fields[field] = value; + if (!fields.source) { + // imply source filtering based on the index pattern, but allow overriding + // it by simply setting another field for "source". When index is changed + fields.source = function () { + return value.getSourceFiltering(); + }; + fields.source[forIp] = value; + } - getId() { - return this._id; + return this; } - getFields() { - return _.clone(this._fields); + if (value == null) { + delete this._fields[field]; + return this; } - /** - * Get fields from the fields - */ - getField = field => { - if (!FIELDS.includes(field)) { - throw new Error(`Can't get field '${field}' from SearchSource. 
Acceptable fields are: ${FIELDS.join(', ')}.`); - } + this._fields[field] = value; + return this; + } - let searchSource = this; + getId() { + return this._id; + } - while (searchSource) { - const value = searchSource._fields[field]; - if (value !== void 0) { - return value; - } + getFields() { + return _.clone(this._fields); + } - searchSource = searchSource.getParent(); - } - }; + /** + * Get fields from the fields + */ + getField(field) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); + } - /** - * Get the field from our own fields, don't traverse up the chain - */ - getOwnField(field) { - if (!FIELDS.includes(field)) { - throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); - } + let searchSource = this; - const value = this._fields[field]; + while (searchSource) { + const value = searchSource._fields[field]; if (value !== void 0) { return value; } - } - create() { - return new SearchSource(); + searchSource = searchSource.getParent(); } + } - createCopy() { - const json = angular.toJson(this._fields); - const newSearchSource = new SearchSource(json); - // when serializing the internal fields we lose the internal classes used in the index - // pattern, so we have to set it again to workaround this behavior - newSearchSource.setField('index', this.getField('index')); - newSearchSource.setParent(this.getParent()); - return newSearchSource; + /** + * Get the field from our own fields, don't traverse up the chain + */ + getOwnField(field) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); } - createChild(options = {}) { - const childSearchSource = new SearchSource(); - childSearchSource.setParent(this, options); - return childSearchSource; + const value = this._fields[field]; + if (value !== void 0) { + return value; } + } - /** + create() { + return new SearchSource(); + } + + createCopy() { + const json = angular.toJson(this._fields); + const newSearchSource = new SearchSource(json); + // when serializing the internal fields we lose the internal classes used in the index + // pattern, so we have to set it again to workaround this behavior + newSearchSource.setField('index', this.getField('index')); + newSearchSource.setParent(this.getParent()); + return newSearchSource; + } + + createChild(options = {}) { + const childSearchSource = new SearchSource(); + childSearchSource.setParent(this, options); + return childSearchSource; + } + + /** * Set a searchSource that this source should inherit from * @param {SearchSource} searchSource - the parent searchSource * @return {this} - chainable */ - setParent(parent, options = {}) { - this._parent = parent; - this._inheritOptions = options; - return this; - } + setParent(parent, options = {}) { + this._parent = parent; + this._inheritOptions = options; + return this; + } - /** + /** * Get the parent of this SearchSource * @return {undefined|searchSource} */ - getParent() { - return this._parent || undefined; - } + getParent() { + return this._parent || undefined; + } - /** + /** * Fetch this source and reject the returned Promise on error * * @async */ - fetch() { - const self = this; - let req = _.first(self._myStartableQueued()); - - if (!req) { - const errorHandler = (request, error) => { - request.defer.reject(error); - request.abort(); - }; - req = self._createRequest({ errorHandler }); - } + 
async fetch(options) { + const $injector = await chrome.dangerouslyGetActiveInjector(); + const es = $injector.get('es'); - fetchSoon.fetchSearchRequests([req]); - return req.getCompletePromise(); - } + await this.requestIsStarting(options); - /** - * Fetch all pending requests for this source ASAP - * @async - */ - fetchQueued() { - return fetchSoon.fetchSearchRequests(this._myStartableQueued()); - } + const searchRequest = await this._flatten(); + this.history = [searchRequest]; - /** - * Cancel all pending requests for this searchSource - * @return {undefined} - */ - cancelQueued() { - searchRequestQueue.getAll() - .filter(req => req.source === this) - .forEach(req => req.abort()); + const response = await fetchSoon(searchRequest, { + ...(this._searchStrategyId && { searchStrategyId: this._searchStrategyId }), + ...options, + }, { es, config, esShardTimeout }); + + if (response.error) { + throw new RequestFailure(null, response); } - /** + return response; + } + + /** * Add a handler that will be notified whenever requests start * @param {Function} handler * @return {undefined} */ - onRequestStart(handler) { - this._requestStartHandlers.push(handler); - } + onRequestStart(handler) { + this._requestStartHandlers.push(handler); + } - /** + /** * Called by requests of this search source when they are started * @param {Courier.Request} request + * @param options * @return {Promise} */ - requestIsStarting(request) { - this.activeFetchCount = (this.activeFetchCount || 0) + 1; - this.history = [request]; - - const handlers = [...this._requestStartHandlers]; - // If callparentStartHandlers has been set to true, we also call all - // handlers of parent search sources. - if (this._inheritOptions.callParentStartHandlers) { - let searchSource = this.getParent(); - while (searchSource) { - handlers.push(...searchSource._requestStartHandlers); - searchSource = searchSource.getParent(); - } + requestIsStarting(options) { + const handlers = [...this._requestStartHandlers]; + // If callparentStartHandlers has been set to true, we also call all + // handlers of parent search sources. + if (this._inheritOptions.callParentStartHandlers) { + let searchSource = this.getParent(); + while (searchSource) { + handlers.push(...searchSource._requestStartHandlers); + searchSource = searchSource.getParent(); } - - return Promise - .map(handlers, fn => fn(this, request)) - .then(_.noop); } - /** - * Put a request in to the courier that this Source should - * be fetched on the next run of the courier - * @return {Promise} - */ - onResults() { - const self = this; - - return new Promise(function (resolve, reject) { - const defer = createDefer(Promise); - defer.promise.then(resolve, reject); - - const errorHandler = (request, error) => { - reject(error); - request.abort(); - }; - self._createRequest({ defer, errorHandler }); - }); - } - - async getSearchRequestBody() { - const searchRequest = await this._flatten(); - return searchRequest.body; - } + return Promise.all(handlers.map(fn => fn(this, options))); + } - /** - * Called by requests of this search source when they are done - * @param {Courier.Request} request - * @return {undefined} - */ - requestIsStopped() { - this.activeFetchCount -= 1; - } + async getSearchRequestBody() { + const searchRequest = await this._flatten(); + return searchRequest.body; + } - /** + /** * Completely destroy the SearchSource. 
* @return {undefined} */ - destroy() { - this.cancelQueued(); - this._requestStartHandlers.length = 0; - } + destroy() { + this._requestStartHandlers.length = 0; + } - /****** + /****** * PRIVATE APIS ******/ - _myStartableQueued() { - return searchRequestQueue - .getStartable() - .filter(req => req.source === this); - } - - /** - * Create a common search request object, which should - * be put into the pending request queue, for this search - * source - * - * @param {Deferred} defer - the deferred object that should be resolved - * when the request is complete - * @return {SearchRequest} - */ - _createRequest({ defer, errorHandler }) { - return new SearchRequest({ source: this, defer, errorHandler }); - } - - /** + /** * Used to merge properties into the data within ._flatten(). * The data is passed in and modified by the function * @@ -443,192 +358,184 @@ export function SearchSourceProvider(Promise, Private, config) { * @param {*} key - The key of `val` * @return {undefined} */ - _mergeProp(data, val, key) { - if (typeof val === 'function') { - const source = this; - return Promise.cast(val(this)) - .then(function (newVal) { - return source._mergeProp(data, newVal, key); - }); - } - - if (val == null || !key || !_.isString(key)) return; - - switch (key) { - case 'filter': - let filters = Array.isArray(val) ? val : [val]; - - filters = filters.filter(filter => { - return this._filterPredicates.every(predicate => predicate(filter, data)); - }); + _mergeProp(data, val, key) { + if (typeof val === 'function') { + const source = this; + return Promise.resolve(val(this)) + .then(function (newVal) { + return source._mergeProp(data, newVal, key); + }); + } - data.filters = [...(data.filters || []), ...filters]; - return; - case 'index': - case 'type': - case 'id': - case 'highlightAll': - if (key && data[key] == null) { - data[key] = val; - } - return; - case 'searchAfter': - key = 'search_after'; - addToBody(); - break; - case 'source': - key = '_source'; - addToBody(); - break; - case 'sort': - val = normalizeSortRequest(val, this.getField('index')); - addToBody(); - break; - case 'query': - data.query = (data.query || []).concat(val); - break; - case 'fields': - data[key] = _.uniq([...(data[key] || []), ...val]); - break; - default: - addToBody(); - } + if (val == null || !key || !_.isString(key)) return; + + switch (key) { + case 'filter': + const filters = Array.isArray(val) ? 
val : [val]; + data.filters = [...(data.filters || []), ...filters]; + return; + case 'index': + case 'type': + case 'id': + case 'highlightAll': + if (key && data[key] == null) { + data[key] = val; + } + return; + case 'searchAfter': + key = 'search_after'; + addToBody(); + break; + case 'source': + key = '_source'; + addToBody(); + break; + case 'sort': + val = normalizeSortRequest(val, this.getField('index'), config.get('sort:options')); + addToBody(); + break; + case 'query': + data.query = (data.query || []).concat(val); + break; + case 'fields': + data[key] = _.uniq([...(data[key] || []), ...val]); + break; + default: + addToBody(); + } - /** + /** * Add the key and val to the body of the request */ - function addToBody() { - data.body = data.body || {}; - // ignore if we already have a value - if (data.body[key] == null) { - data.body[key] = val; - } + function addToBody() { + data.body = data.body || {}; + // ignore if we already have a value + if (data.body[key] == null) { + data.body[key] = val; } } + } - /** + /** * Walk the inheritance chain of a source and return it's * flat representation (taking into account merging rules) * @returns {Promise} * @resolved {Object|null} - the flat data of the SearchSource */ - _flatten() { - // the merged data of this dataSource and it's ancestors - const flatData = {}; - - // function used to write each property from each data object in the chain to flat data - const root = this; - - // start the chain at this source - let current = this; - - // call the ittr and return it's promise - return (function ittr() { - // iterate the _fields object (not array) and - // pass each key:value pair to source._mergeProp. if _mergeProp - // returns a promise, then wait for it to complete and call _mergeProp again - return Promise.all(_.map(current._fields, function ittr(value, key) { - if (Promise.is(value)) { - return value.then(function (value) { - return ittr(value, key); - }); - } - - const prom = root._mergeProp(flatData, value, key); - return Promise.is(prom) ? prom : null; - })) - .then(function () { - // move to this sources parent - const parent = current.getParent(); - // keep calling until we reach the top parent - if (parent) { - current = parent; - return ittr(); - } + _flatten() { + // the merged data of this dataSource and it's ancestors + const flatData = {}; + + // function used to write each property from each data object in the chain to flat data + const root = this; + + // start the chain at this source + let current = this; + + // call the ittr and return it's promise + return (function ittr() { + // iterate the _fields object (not array) and + // pass each key:value pair to source._mergeProp. if _mergeProp + // returns a promise, then wait for it to complete and call _mergeProp again + return Promise.all(_.map(current._fields, function ittr(value, key) { + if (value instanceof Promise) { + return value.then(function (value) { + return ittr(value, key); }); - }()) - .then(function () { - // This is down here to prevent the circular dependency - flatData.body = flatData.body || {}; - - const computedFields = flatData.index.getComputedFields(); - - flatData.body.stored_fields = computedFields.storedFields; - flatData.body.script_fields = flatData.body.script_fields || {}; - _.extend(flatData.body.script_fields, computedFields.scriptFields); - - const defaultDocValueFields = computedFields.docvalueFields ? 
computedFields.docvalueFields : []; - flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields; + } - if (flatData.body._source) { - // exclude source fields for this index pattern specified by the user - const filter = fieldWildcardFilter(flatData.body._source.excludes); - flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter( - docvalueField => filter(docvalueField.field) - ); + const prom = root._mergeProp(flatData, value, key); + return prom instanceof Promise ? prom : null; + })) + .then(function () { + // move to this sources parent + const parent = current.getParent(); + // keep calling until we reach the top parent + if (parent) { + current = parent; + return ittr(); } + }); + }()) + .then(function () { + // This is down here to prevent the circular dependency + flatData.body = flatData.body || {}; + + const computedFields = flatData.index.getComputedFields(); + + flatData.body.stored_fields = computedFields.storedFields; + flatData.body.script_fields = flatData.body.script_fields || {}; + _.extend(flatData.body.script_fields, computedFields.scriptFields); + + const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : []; + flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields; + + if (flatData.body._source) { + // exclude source fields for this index pattern specified by the user + const filter = fieldWildcardFilter(flatData.body._source.excludes, config.get('metaFields')); + flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter( + docvalueField => filter(docvalueField.field) + ); + } - // if we only want to search for certain fields - const fields = flatData.fields; - if (fields) { - // filter out the docvalue_fields, and script_fields to only include those that we are concerned with - flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields); - flatData.body.script_fields = _.pick(flatData.body.script_fields, fields); - - // request the remaining fields from both stored_fields and _source - const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields)); - flatData.body.stored_fields = remainingFields; - _.set(flatData.body, '_source.includes', remainingFields); - } + // if we only want to search for certain fields + const fields = flatData.fields; + if (fields) { + // filter out the docvalue_fields, and script_fields to only include those that we are concerned with + flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields); + flatData.body.script_fields = _.pick(flatData.body.script_fields, fields); + + // request the remaining fields from both stored_fields and _source + const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields)); + flatData.body.stored_fields = remainingFields; + _.set(flatData.body, '_source.includes', remainingFields); + } - const esQueryConfigs = getEsQueryConfig(config); - flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs); + const esQueryConfigs = getEsQueryConfig(config); + flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs); - if (flatData.highlightAll != null) { - if (flatData.highlightAll && flatData.body.query) { - flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig); - } - delete flatData.highlightAll; + if (flatData.highlightAll != null) { + if (flatData.highlightAll && flatData.body.query) { 
+ flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig); } + delete flatData.highlightAll; + } - /** + /** * Translate a filter into a query to support es 3+ * @param {Object} filter - The filter to translate * @return {Object} the query version of that filter */ - const translateToQuery = function (filter) { - if (!filter) return; + const translateToQuery = function (filter) { + if (!filter) return; - if (filter.query) { - return filter.query; - } + if (filter.query) { + return filter.query; + } - return filter; - }; + return filter; + }; - // re-write filters within filter aggregations - (function recurse(aggBranch) { - if (!aggBranch) return; - Object.keys(aggBranch).forEach(function (id) { - const agg = aggBranch[id]; + // re-write filters within filter aggregations + (function recurse(aggBranch) { + if (!aggBranch) return; + Object.keys(aggBranch).forEach(function (id) { + const agg = aggBranch[id]; - if (agg.filters) { - // translate filters aggregations - const filters = agg.filters.filters; + if (agg.filters) { + // translate filters aggregations + const filters = agg.filters.filters; - Object.keys(filters).forEach(function (filterId) { - filters[filterId] = translateToQuery(filters[filterId]); - }); - } + Object.keys(filters).forEach(function (filterId) { + filters[filterId] = translateToQuery(filters[filterId]); + }); + } - recurse(agg.aggs || agg.aggregations); - }); - }(flatData.body.aggs || flatData.body.aggregations)); + recurse(agg.aggs || agg.aggregations); + }); + }(flatData.body.aggs || flatData.body.aggregations)); - return flatData; - }); - } + return flatData; + }); } - - return SearchSource; } diff --git a/src/legacy/ui/public/courier/search_source/search_source.test.js b/src/legacy/ui/public/courier/search_source/search_source.test.js new file mode 100644 index 0000000000000..800f4e4308671 --- /dev/null +++ b/src/legacy/ui/public/courier/search_source/search_source.test.js @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { SearchSource } from '../search_source'; + +jest.mock('ui/new_platform', () => ({ + npSetup: { + core: { + injectedMetadata: { + getInjectedVar: () => 0, + } + } + } +})); + +jest.mock('../fetch', () => ({ + fetchSoon: jest.fn(), +})); + +const indexPattern = { title: 'foo' }; +const indexPattern2 = { title: 'foo' }; + +describe('SearchSource', function () { + describe('#setField()', function () { + it('sets the value for the property', function () { + const searchSource = new SearchSource(); + searchSource.setField('aggs', 5); + expect(searchSource.getField('aggs')).toBe(5); + }); + + it('throws an error if the property is not accepted', function () { + const searchSource = new SearchSource(); + expect(() => searchSource.setField('index', 5)).toThrow(); + }); + }); + + describe('#getField()', function () { + it('gets the value for the property', function () { + const searchSource = new SearchSource(); + searchSource.setField('aggs', 5); + expect(searchSource.getField('aggs')).toBe(5); + }); + + it('throws an error if the property is not accepted', function () { + const searchSource = new SearchSource(); + expect(() => searchSource.getField('unacceptablePropName')).toThrow(); + }); + }); + + describe(`#setField('index')`, function () { + describe('auto-sourceFiltering', function () { + describe('new index pattern assigned', function () { + it('generates a searchSource filter', function () { + const searchSource = new SearchSource(); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + searchSource.setField('index', indexPattern); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(typeof searchSource.getField('source')).toBe('function'); + }); + + it('removes created searchSource filter on removal', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + }); + }); + + describe('new index pattern assigned over another', function () { + it('replaces searchSource filter with new', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + const searchSourceFilter1 = searchSource.getField('source'); + searchSource.setField('index', indexPattern2); + expect(searchSource.getField('index')).toBe(indexPattern2); + expect(typeof searchSource.getField('source')).toBe('function'); + expect(searchSource.getField('source')).not.toBe(searchSourceFilter1); + }); + + it('removes created searchSource filter on removal', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + searchSource.setField('index', indexPattern2); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + }); + }); + + describe('ip assigned before custom searchSource filter', function () { + it('custom searchSource filter becomes new searchSource', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('index', indexPattern); + expect(typeof searchSource.getField('source')).toBe('function'); + searchSource.setField('source', football); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(searchSource.getField('source')).toBe(football); + }); + + it('custom searchSource 
stays after removal', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('index', indexPattern); + searchSource.setField('source', football); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(football); + }); + }); + + describe('ip assigned after custom searchSource filter', function () { + it('leaves the custom filter in place', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('source', football); + searchSource.setField('index', indexPattern); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(searchSource.getField('source')).toBe(football); + }); + + it('custom searchSource stays after removal', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('source', football); + searchSource.setField('index', indexPattern); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(football); + }); + }); + }); + }); + + describe('#onRequestStart()', () => { + it('should be called when starting a request', () => { + const searchSource = new SearchSource(); + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const options = {}; + searchSource.requestIsStarting(options); + expect(fn).toBeCalledWith(searchSource, options); + }); + + it('should not be called on parent searchSource', () => { + const parent = new SearchSource(); + const searchSource = new SearchSource().setParent(parent); + + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const parentFn = jest.fn(); + parent.onRequestStart(parentFn); + const options = {}; + searchSource.requestIsStarting(options); + + expect(fn).toBeCalledWith(searchSource, options); + expect(parentFn).not.toBeCalled(); + }); + + it('should be called on parent searchSource if callParentStartHandlers is true', () => { + const parent = new SearchSource(); + const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true }); + + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const parentFn = jest.fn(); + parent.onRequestStart(parentFn); + const options = {}; + searchSource.requestIsStarting(options); + + expect(fn).toBeCalledWith(searchSource, options); + expect(parentFn).toBeCalledWith(searchSource, options); + }); + }); +}); diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js index 4b1f488ece128..7d9865c137e62 100644 --- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js +++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js @@ -19,48 +19,13 @@ import { addSearchStrategy } from './search_strategy_registry'; import { isDefaultTypeIndexPattern } from './is_default_type_index_pattern'; -import { SearchError } from './search_error'; -import { getSearchParams, getMSearchParams } from '../fetch/get_search_params'; - -function getAllFetchParams(searchRequests, Promise) { - return Promise.map(searchRequests, (searchRequest) => { - return Promise.try(searchRequest.getFetchParams, void 0, searchRequest) - .then((fetchParams) => { - return (searchRequest.fetchParams = fetchParams); - }) - .then(value => ({ resolved: value })) - .catch(error => ({ rejected: error })); - }); -} - -async function 
serializeAllFetchParams(fetchParams, searchRequests, serializeFetchParams) { - const searchRequestsWithFetchParams = []; - const failedSearchRequests = []; - - // Gather the fetch param responses from all the successful requests. - fetchParams.forEach((result, index) => { - if (result.resolved) { - searchRequestsWithFetchParams.push(result.resolved); - } else { - const searchRequest = searchRequests[index]; - - searchRequest.handleFailure(result.rejected); - failedSearchRequests.push(searchRequest); - } - }); - - return { - serializedFetchParams: await serializeFetchParams(searchRequestsWithFetchParams), - failedSearchRequests, - }; -} +import { getSearchParams, getMSearchParams, getPreference, getTimeout } from '../fetch/get_search_params'; export const defaultSearchStrategy = { id: 'default', search: params => { - const { config } = params; - return config.get('courier:batchSearches') ? msearch(params) : search(params); + return params.config.get('courier:batchSearches') ? msearch(params) : search(params); }, isViable: (indexPattern) => { @@ -72,79 +37,43 @@ export const defaultSearchStrategy = { }, }; -async function msearch({ searchRequests, es, Promise, serializeFetchParams, config }) { - // Flatten the searchSource within each searchRequest to get the fetch params, - // e.g. body, filters, index pattern, query. - const allFetchParams = await getAllFetchParams(searchRequests, Promise); - - // Serialize the fetch params into a format suitable for the body of an ES query. - const { - serializedFetchParams, - failedSearchRequests, - } = await serializeAllFetchParams(allFetchParams, searchRequests, serializeFetchParams); - - if (serializedFetchParams.trim() === '') { - return { - failedSearchRequests, +function msearch({ searchRequests, es, config, esShardTimeout }) { + const inlineRequests = searchRequests.map(({ index, body, search_type: searchType }) => { + const inlineHeader = { + index: index.title || index, + search_type: searchType, + ignore_unavailable: true, + preference: getPreference(config) }; - } - const msearchParams = { - ...getMSearchParams(config), - body: serializedFetchParams, - }; - - const searching = es.msearch(msearchParams); + const inlineBody = { + ...body, + timeout: getTimeout(esShardTimeout) + }; + return `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`; + }); + const searching = es.msearch({ + ...getMSearchParams(config), + body: `${inlineRequests.join('\n')}\n`, + }); return { - // Munge data into shape expected by consumer. - searching: new Promise((resolve, reject) => { - // Unwrap the responses object returned by the ES client. - searching.then(({ responses }) => { - resolve(responses); - }).catch(error => { - // Format ES client error as a SearchError. 
- const { statusCode, displayName, message, path } = error; - - const searchError = new SearchError({ - status: statusCode, - title: displayName, - message, - path, - }); - - reject(searchError); - }); - }), - abort: searching.abort, - failedSearchRequests, + searching: searching.then(({ responses }) => responses), + abort: searching.abort }; } -function search({ searchRequests, es, Promise, config, sessionId, esShardTimeout }) { - const failedSearchRequests = []; +function search({ searchRequests, es, config, esShardTimeout }) { const abortController = new AbortController(); - const searchParams = getSearchParams(config, sessionId, esShardTimeout); - const promises = searchRequests.map(async searchRequest => { - return searchRequest.getFetchParams() - .then(fetchParams => { - const { index, body } = searchRequest.fetchParams = fetchParams; - const promise = es.search({ index: index.title || index, body, ...searchParams }); - abortController.signal.addEventListener('abort', promise.abort); - return promise; - }, error => { - searchRequest.handleFailure(error); - failedSearchRequests.push(searchRequest); - }) - .catch(({ response }) => { - // Copying the _msearch behavior where the errors for individual requests are returned - // instead of thrown - return JSON.parse(response); - }); + const searchParams = getSearchParams(config, esShardTimeout); + const promises = searchRequests.map(({ index, body }) => { + const searching = es.search({ index: index.title || index, body, ...searchParams }) + .catch(({ response }) => JSON.parse(response)); + abortController.signal.addEventListener('abort', searching.abort); + return searching; }); return { searching: Promise.all(promises), abort: () => abortController.abort(), - failedSearchRequests }; } diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js index dc8732032ba22..953ca4fe800f1 100644 --- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js +++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js @@ -18,7 +18,6 @@ */ import { defaultSearchStrategy } from './default_search_strategy'; -import Bluebird from 'bluebird'; const { search } = defaultSearchStrategy; @@ -29,14 +28,12 @@ function getConfigStub(config = {}) { } describe('defaultSearchStrategy', function () { - describe('search', function () { - let searchArgs; beforeEach(() => { - const msearchMock = jest.fn().mockReturnValue(Bluebird.resolve([])); - const searchMock = jest.fn().mockReturnValue(Bluebird.resolve([])); + const msearchMock = jest.fn().mockReturnValue(Promise.resolve([])); + const searchMock = jest.fn().mockReturnValue(Promise.resolve([])); searchArgs = { searchRequests: [], @@ -44,8 +41,6 @@ describe('defaultSearchStrategy', function () { msearch: msearchMock, search: searchMock, }, - Promise: Bluebird, - serializeFetchParams: () => Bluebird.resolve('pretend this is a valid request body'), }; }); @@ -78,7 +73,5 @@ describe('defaultSearchStrategy', function () { await search(searchArgs); expect(searchArgs.es.msearch.mock.calls[0][0]).toHaveProperty('ignore_throttled', false); }); - }); - }); diff --git a/src/legacy/ui/public/courier/search_strategy/index.js b/src/legacy/ui/public/courier/search_strategy/index.js index 3f6d172426d0d..229d0cbb1da5d 100644 --- a/src/legacy/ui/public/courier/search_strategy/index.js +++ b/src/legacy/ui/public/courier/search_strategy/index.js @@ -18,9 +18,10 @@ */ export { - 
assignSearchRequestsToSearchStrategies, addSearchStrategy, hasSearchStategyForIndexPattern, + getSearchStrategyById, + getSearchStrategyForSearchRequest, } from './search_strategy_registry'; export { isDefaultTypeIndexPattern } from './is_default_type_index_pattern'; diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js index 3af93e4f16509..e67d39ea27aa6 100644 --- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js +++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js @@ -19,7 +19,7 @@ import { noOpSearchStrategy } from './no_op_search_strategy'; -const searchStrategies = []; +export const searchStrategies = []; export const addSearchStrategy = searchStrategy => { if (searchStrategies.includes(searchStrategy)) { @@ -29,28 +29,26 @@ export const addSearchStrategy = searchStrategy => { searchStrategies.push(searchStrategy); }; -const getSearchStrategyByViability = indexPattern => { +export const getSearchStrategyByViability = indexPattern => { return searchStrategies.find(searchStrategy => { return searchStrategy.isViable(indexPattern); }); }; -const getSearchStrategyById = searchStrategyId => { +export const getSearchStrategyById = searchStrategyId => { return searchStrategies.find(searchStrategy => { return searchStrategy.id === searchStrategyId; }); }; -const getSearchStrategyForSearchRequest = searchRequest => { +export const getSearchStrategyForSearchRequest = (searchRequest, { searchStrategyId } = {}) => { // Allow the searchSource to declare the correct strategy with which to execute its searches. - const preferredSearchStrategyId = searchRequest.source.getPreferredSearchStrategyId(); - if (preferredSearchStrategyId != null) { - return getSearchStrategyById(preferredSearchStrategyId); + if (searchStrategyId != null) { + return getSearchStrategyById(searchStrategyId); } // Otherwise try to match it to a strategy. - const indexPattern = searchRequest.source.getField('index'); - const viableSearchStrategy = getSearchStrategyByViability(indexPattern); + const viableSearchStrategy = getSearchStrategyByViability(searchRequest.index); if (viableSearchStrategy) { return viableSearchStrategy; @@ -60,47 +58,6 @@ const getSearchStrategyForSearchRequest = searchRequest => { return noOpSearchStrategy; }; - -/** - * Build a structure like this: - * - * [{ - * searchStrategy: rollupSearchStrategy, - * searchRequests: [], - * }, { - * searchStrategy: defaultSearchStrategy, - * searchRequests: [], - * }] - * - * We use an array of objects to preserve the order of the search requests, which we use to - * deterministically associate each response with the originating request. - */ -export const assignSearchRequestsToSearchStrategies = searchRequests => { - const searchStrategiesWithRequests = []; - const searchStrategyById = {}; - - searchRequests.forEach(searchRequest => { - const matchingSearchStrategy = getSearchStrategyForSearchRequest(searchRequest); - const { id } = matchingSearchStrategy; - let searchStrategyWithRequest = searchStrategyById[id]; - - // Create the data structure if we don't already have it. 
- if (!searchStrategyWithRequest) { - searchStrategyWithRequest = { - searchStrategy: matchingSearchStrategy, - searchRequests: [], - }; - - searchStrategyById[id] = searchStrategyWithRequest; - searchStrategiesWithRequests.push(searchStrategyWithRequest); - } - - searchStrategyWithRequest.searchRequests.push(searchRequest); - }); - - return searchStrategiesWithRequests; -}; - export const hasSearchStategyForIndexPattern = indexPattern => { return Boolean(getSearchStrategyByViability(indexPattern)); }; diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js index 5f7e14082d577..362d303eb6203 100644 --- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js +++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js @@ -17,79 +17,98 @@ * under the License. */ +import { noOpSearchStrategy } from './no_op_search_strategy'; import { - assignSearchRequestsToSearchStrategies, + searchStrategies, addSearchStrategy, + getSearchStrategyByViability, + getSearchStrategyById, + getSearchStrategyForSearchRequest, + hasSearchStategyForIndexPattern } from './search_strategy_registry'; -import { noOpSearchStrategy } from './no_op_search_strategy'; +const mockSearchStrategies = [{ + id: 0, + isViable: index => index === 0 +}, { + id: 1, + isViable: index => index === 1 +}]; + +describe('Search strategy registry', () => { + beforeEach(() => { + searchStrategies.length = 0; + }); + + describe('addSearchStrategy', () => { + it('adds a search strategy', () => { + addSearchStrategy(mockSearchStrategies[0]); + expect(searchStrategies.length).toBe(1); + }); + + it('does not add a search strategy if it is already included', () => { + addSearchStrategy(mockSearchStrategies[0]); + addSearchStrategy(mockSearchStrategies[0]); + expect(searchStrategies.length).toBe(1); + }); + }); + + describe('getSearchStrategyByViability', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); + + it('returns the viable strategy', () => { + expect(getSearchStrategyByViability(0)).toBe(mockSearchStrategies[0]); + expect(getSearchStrategyByViability(1)).toBe(mockSearchStrategies[1]); + }); + + it('returns undefined if there is no viable strategy', () => { + expect(getSearchStrategyByViability(-1)).toBe(undefined); + }); + }); + + describe('getSearchStrategyById', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); + + it('returns the strategy by ID', () => { + expect(getSearchStrategyById(0)).toBe(mockSearchStrategies[0]); + expect(getSearchStrategyById(1)).toBe(mockSearchStrategies[1]); + }); -describe('SearchStrategyRegistry', () => { - describe('assignSearchRequestsToSearchStrategies', () => { - test('associates search requests with valid search strategies', () => { - const searchStrategyA = { - id: 'a', - isViable: indexPattern => { - return indexPattern === 'a'; - }, - }; - - addSearchStrategy(searchStrategyA); - - const searchStrategyB = { - id: 'b', - isViable: indexPattern => { - return indexPattern === 'b'; - }, - }; - - addSearchStrategy(searchStrategyB); - - const searchRequest0 = { - id: 0, - source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequest1 = { - id: 1, - source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequest2 = { - id: 2, - source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} }, - 
}; - - const searchRequest3 = { - id: 3, - source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequests = [ searchRequest0, searchRequest1, searchRequest2, searchRequest3]; - const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests); - - expect(searchStrategiesWithSearchRequests).toEqual([{ - searchStrategy: searchStrategyB, - searchRequests: [ searchRequest0, searchRequest3 ], - }, { - searchStrategy: searchStrategyA, - searchRequests: [ searchRequest1, searchRequest2 ], - }]); + it('returns undefined if there is no strategy with that ID', () => { + expect(getSearchStrategyById(-1)).toBe(undefined); }); + }); - test(`associates search requests with noOpSearchStrategy when a viable one can't be found`, () => { - const searchRequest0 = { - id: 0, - source: { getField: () => {}, getPreferredSearchStrategyId: () => {} }, - }; + describe('getSearchStrategyForSearchRequest', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); - const searchRequests = [ searchRequest0 ]; - const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests); + it('returns the strategy by ID if provided', () => { + expect(getSearchStrategyForSearchRequest({}, { searchStrategyId: 1 })).toBe(mockSearchStrategies[1]); + }); + + it('returns the strategy by viability if there is one', () => { + expect(getSearchStrategyForSearchRequest({ index: 1 })).toBe(mockSearchStrategies[1]); + }); + + it('returns the no op strategy if there is no viable strategy', () => { + expect(getSearchStrategyForSearchRequest({ index: 3 })).toBe(noOpSearchStrategy); + }); + }); + + describe('hasSearchStategyForIndexPattern', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); - expect(searchStrategiesWithSearchRequests).toEqual([{ - searchStrategy: noOpSearchStrategy, - searchRequests: [ searchRequest0 ], - }]); + it('returns whether there is a search strategy for this index pattern', () => { + expect(hasSearchStategyForIndexPattern(0)).toBe(true); + expect(hasSearchStategyForIndexPattern(-1)).toBe(false); }); }); }); diff --git a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss deleted file mode 100644 index 769abea150199..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss +++ /dev/null @@ -1,3 +0,0 @@ -.kbnError--multi-allow-explicit-index { - padding: $euiSizeL; -} diff --git a/src/legacy/ui/public/error_allow_explicit_index/_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_index.scss deleted file mode 100644 index 84cb111127679..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/_index.scss +++ /dev/null @@ -1 +0,0 @@ -@import './error_allow_explicit_index'; diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html deleted file mode 100644 index e61383b11101a..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html +++ /dev/null @@ -1,48 +0,0 @@ -
- [deleted 48-line Angular HTML template body for the multi.allow_explicit_index error page; the markup was stripped during extraction and only the outline of a three-item numbered list survives]
diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js deleted file mode 100644 index 35763d8dd0385..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { i18n } from '@kbn/i18n'; -import { get } from 'lodash'; - -import uiRoutes from '../routes'; -import { KbnUrlProvider } from '../url'; - -import template from './error_allow_explicit_index.html'; - -uiRoutes - .when('/error/multi.allow_explicit_index', { - template, - k7Breadcrumbs: () => [{ text: i18n.translate('common.ui.errorAllowExplicitIndex.breadcrumbs.errorText', { defaultMessage: 'Error' }) }], - }); - -export function ErrorAllowExplicitIndexProvider(Private, Promise) { - const kbnUrl = Private(KbnUrlProvider); - - return new (class ErrorAllowExplicitIndex { - test(error) { - if (!error || error.status !== 400) { - return false; - } - - const type = get(error, 'body.error.type'); - const reason = get(error, 'body.error.reason'); - - return ( - type === 'illegal_argument_exception' && - String(reason).includes('explicit index') - ); - } - - takeover() { - kbnUrl.change('/error/multi.allow_explicit_index'); - return Promise.halt(); - } - }); -} diff --git a/src/legacy/ui/public/error_allow_explicit_index/index.js b/src/legacy/ui/public/error_allow_explicit_index/index.js deleted file mode 100644 index a832fde31c987..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -export { ErrorAllowExplicitIndexProvider } from './error_allow_explicit_index'; diff --git a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js index aeffdbc8bfa6c..a15c602b7ba83 100644 --- a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js +++ b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js @@ -20,19 +20,12 @@ import expect from '@kbn/expect'; import ngMock from 'ng_mock'; -import { FieldWildcardProvider } from '../../field_wildcard'; +import { fieldWildcardFilter, makeRegEx } from '../../field_wildcard'; describe('fieldWildcard', function () { - let fieldWildcardFilter; - let makeRegEx; + const metaFields = ['_id', '_type', '_source']; beforeEach(ngMock.module('kibana')); - beforeEach(ngMock.inject(function (config, Private) { - config.set('metaFields', ['_id', '_type', '_source']); - const fieldWildcard = Private(FieldWildcardProvider); - fieldWildcardFilter = fieldWildcard.fieldWildcardFilter; - makeRegEx = fieldWildcard.makeRegEx; - })); describe('makeRegEx', function () { it('matches * in any position', function () { @@ -70,7 +63,7 @@ describe('fieldWildcard', function () { }); it('filters nothing when given an empty array', function () { - const filter = fieldWildcardFilter([]); + const filter = fieldWildcardFilter([], metaFields); const original = [ 'foo', 'bar', @@ -82,7 +75,7 @@ describe('fieldWildcard', function () { }); it('does not filter metaFields', function () { - const filter = fieldWildcardFilter([ '_*' ]); + const filter = fieldWildcardFilter([ '_*' ], metaFields); const original = [ '_id', @@ -97,7 +90,7 @@ describe('fieldWildcard', function () { const filter = fieldWildcardFilter([ 'f*', '*4' - ]); + ], metaFields); const original = [ 'foo', @@ -114,7 +107,7 @@ describe('fieldWildcard', function () { 'f*', '*4', 'undefined' - ]); + ], metaFields); const original = [ 'foo', diff --git a/src/legacy/ui/public/field_wildcard/field_wildcard.js b/src/legacy/ui/public/field_wildcard/field_wildcard.js index f73997d40a4e4..656641b20a98c 100644 --- a/src/legacy/ui/public/field_wildcard/field_wildcard.js +++ b/src/legacy/ui/public/field_wildcard/field_wildcard.js @@ -19,31 +19,25 @@ import { escapeRegExp, memoize } from 'lodash'; -export function FieldWildcardProvider(config) { - const metaFields = config.get('metaFields'); +export const makeRegEx = memoize(function makeRegEx(glob) { + return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$'); +}); - const makeRegEx = memoize(function makeRegEx(glob) { - return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$'); - }); - - // Note that this will return an essentially noop function if globs is undefined. - function fieldWildcardMatcher(globs = []) { - return function matcher(val) { - // do not test metaFields or keyword - if (metaFields.indexOf(val) !== -1) { - return false; - } - return globs.some(p => makeRegEx(p).test(val)); - }; - } - - // Note that this will return an essentially noop function if globs is undefined. - function fieldWildcardFilter(globs = []) { - const matcher = fieldWildcardMatcher(globs); - return function filter(val) { - return !matcher(val); - }; - } +// Note that this will return an essentially noop function if globs is undefined. 
+export function fieldWildcardMatcher(globs = [], metaFields) { + return function matcher(val) { + // do not test metaFields or keyword + if (metaFields.indexOf(val) !== -1) { + return false; + } + return globs.some(p => makeRegEx(p).test(val)); + }; +} - return { makeRegEx, fieldWildcardMatcher, fieldWildcardFilter }; +// Note that this will return an essentially noop function if globs is undefined. +export function fieldWildcardFilter(globs = [], metaFields = []) { + const matcher = fieldWildcardMatcher(globs, metaFields); + return function filter(val) { + return !matcher(val); + }; } diff --git a/src/legacy/ui/public/field_wildcard/index.js b/src/legacy/ui/public/field_wildcard/index.js index d03643f8804d8..db9f830e450b8 100644 --- a/src/legacy/ui/public/field_wildcard/index.js +++ b/src/legacy/ui/public/field_wildcard/index.js @@ -17,4 +17,4 @@ * under the License. */ -export { FieldWildcardProvider } from './field_wildcard'; +export * from './field_wildcard'; diff --git a/src/legacy/ui/public/filter_manager/__tests__/filter_generator.js b/src/legacy/ui/public/filter_manager/__tests__/filter_generator.js index e7752b1e4b906..dee7942f50c02 100644 --- a/src/legacy/ui/public/filter_manager/__tests__/filter_generator.js +++ b/src/legacy/ui/public/filter_manager/__tests__/filter_generator.js @@ -44,7 +44,6 @@ function checkAddFilters(length, comps, idx) { describe('Filter Manager', function () { beforeEach(ngMock.module( 'kibana', - 'kibana/courier', 'kibana/global_state', function ($provide) { $provide.service('indexPatterns', require('fixtures/mock_index_patterns')); diff --git a/src/legacy/ui/public/legacy_compat/angular_config.tsx b/src/legacy/ui/public/legacy_compat/angular_config.tsx index 28d57e9f8e8c9..8eac31e24530c 100644 --- a/src/legacy/ui/public/legacy_compat/angular_config.tsx +++ b/src/legacy/ui/public/legacy_compat/angular_config.tsx @@ -64,7 +64,6 @@ export const configureAppAngularModule = (angularModule: IModule) => { .value('buildNum', legacyMetadata.buildNum) .value('buildSha', legacyMetadata.buildSha) .value('serverName', legacyMetadata.serverName) - .value('sessionId', Date.now()) .value('esUrl', getEsUrl(newPlatform)) .value('uiCapabilities', capabilities.get()) .config(setupCompileProvider(newPlatform)) diff --git a/src/legacy/ui/public/management/components/sidebar_nav.tsx b/src/legacy/ui/public/management/components/sidebar_nav.tsx index ef232c7ef7eda..f0ac787e0ef44 100644 --- a/src/legacy/ui/public/management/components/sidebar_nav.tsx +++ b/src/legacy/ui/public/management/components/sidebar_nav.tsx @@ -19,6 +19,7 @@ import { EuiIcon, EuiSideNav, IconType } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; +import { i18n } from '@kbn/i18n'; import React from 'react'; import { IndexedArray } from 'ui/indexed_array'; @@ -73,6 +74,9 @@ export class SidebarNav extends React.Component { - if (this.searchSource) { - this.searchSource.cancelQueued(); - } - }; + this.destroy = () => {}; /** * Delete this object from Elasticsearch diff --git a/src/legacy/ui/public/vis/vis.js b/src/legacy/ui/public/vis/vis.js index c34fc1b10378e..c1fff1556e3ad 100644 --- a/src/legacy/ui/public/vis/vis.js +++ b/src/legacy/ui/public/vis/vis.js @@ -33,14 +33,13 @@ import '../render_complete/directive'; import { AggConfigs } from '../agg_types/agg_configs'; import { PersistedState } from '../persisted_state'; import { updateVisualizationConfig } from './vis_update'; -import { SearchSourceProvider } from '../courier/search_source'; +import { SearchSource } from 
'../courier'; import { start as visualizations } from '../../../core_plugins/visualizations/public/np_ready/public/legacy'; import '../directives/bind'; export function VisProvider(Private, getAppState) { const visTypes = visualizations.types; - const SearchSource = Private(SearchSourceProvider); class Vis extends EventEmitter { constructor(indexPattern, visState) { diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts index 9d6b56c32f1cb..c73f787457a03 100644 --- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts +++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts @@ -18,6 +18,7 @@ */ jest.mock('ui/new_platform'); +import { searchSourceMock } from '../../courier/search_source/mocks'; import { mockDataLoaderFetch, timefilter } from './embedded_visualize_handler.test.mocks'; import _ from 'lodash'; @@ -85,7 +86,7 @@ describe('EmbeddedVisualizeHandler', () => { inspectorAdapters: {}, query: undefined, queryFilter: null, - searchSource: undefined, + searchSource: searchSourceMock, timeRange: undefined, uiState: undefined, }; @@ -96,7 +97,7 @@ describe('EmbeddedVisualizeHandler', () => { { vis: mockVis, title: 'My Vis', - searchSource: undefined, + searchSource: searchSourceMock, destroy: () => ({}), copyOnSave: false, save: () => Promise.resolve('123'), @@ -128,7 +129,7 @@ describe('EmbeddedVisualizeHandler', () => { { vis: mockVis, title: 'My Vis', - searchSource: undefined, + searchSource: searchSourceMock, destroy: () => ({}), copyOnSave: false, save: () => Promise.resolve('123'), diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts index 119ec8a004239..bc2152911d1ec 100644 --- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts +++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts @@ -518,9 +518,9 @@ export class EmbeddedVisualizeHandler { // If the data loader was aborted then no need to surface this error in the UI if (error && error.name === 'AbortError') return; - // TODO: come up with a general way to cancel execution of pipeline expressions. 
- if (this.dataLoaderParams.searchSource && this.dataLoaderParams.searchSource.cancelQueued) { - this.dataLoaderParams.searchSource.cancelQueued(); + // Cancel execution of pipeline expressions + if (this.abortController) { + this.abortController.abort(); } this.vis.requestError = error; diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts index f644d7f52d458..0f9f04c87fc6f 100644 --- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts +++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts @@ -28,7 +28,7 @@ import { } from './build_pipeline'; import { Vis, VisState } from 'ui/vis'; import { AggConfig } from 'ui/agg_types/agg_config'; -import { SearchSource } from 'ui/courier'; +import { searchSourceMock } from 'ui/courier/search_source/mocks'; jest.mock('ui/new_platform'); jest.mock('ui/agg_types/buckets/date_histogram', () => ({ @@ -348,10 +348,7 @@ describe('visualize loader pipeline helpers: build pipeline', () => { toExpression: () => 'testing custom expressions', }, }; - const searchSource: SearchSource = { - getField: () => null, - }; - const expression = await buildPipeline(vis, { searchSource }); + const expression = await buildPipeline(vis, { searchSource: searchSourceMock }); expect(expression).toMatchSnapshot(); }); }); diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts index e8f1faf915eaf..a0d5b7b36d7f6 100644 --- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts +++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts @@ -442,18 +442,9 @@ export const buildVislibDimensions = async ( } else if (xAgg.type.name === 'histogram') { const intervalParam = xAgg.type.paramByName('interval'); const output = { params: {} as any }; - const searchRequest = { - whenAborted: (fn: any) => { - if (params.abortSignal) { - params.abortSignal.addEventListener('abort', fn); - } - }, - }; - await intervalParam.modifyAggConfigOnSearchRequestStart( - xAgg, - params.searchSource, - searchRequest - ); + await intervalParam.modifyAggConfigOnSearchRequestStart(xAgg, params.searchSource, { + abortSignal: params.abortSignal, + }); intervalParam.write(xAgg, output); dimensions.x.params.interval = output.params.interval; } diff --git a/src/legacy/ui/ui_render/bootstrap/template.js.hbs b/src/legacy/ui/ui_render/bootstrap/template.js.hbs index d305626057540..d8a55935b705a 100644 --- a/src/legacy/ui/ui_render/bootstrap/template.js.hbs +++ b/src/legacy/ui/ui_render/bootstrap/template.js.hbs @@ -46,7 +46,7 @@ if (window.__kbnStrictCsp__ && window.__kbnCspNotEnforced__) { function createJavascriptElement(path) { var dom = document.createElement('script'); - dom.setAttribute('async', ''); + dom.setAttribute('defer', 'defer'); dom.addEventListener('error', failure); dom.setAttribute('src', file); dom.addEventListener('load', next); diff --git a/test/api_integration/apis/core/index.js b/test/api_integration/apis/core/index.js index d617b2ad07351..e5da4e4730662 100644 --- a/test/api_integration/apis/core/index.js +++ b/test/api_integration/apis/core/index.js @@ -16,21 +16,45 @@ * specific language governing permissions and limitations * under the License. 
*/ +import expect from '@kbn/expect'; export default function ({ getService }) { const supertest = getService('supertest'); - describe('core request context', () => { - it('provides access to elasticsearch', async () => ( - await supertest - .get('/requestcontext/elasticsearch') - .expect(200, 'Elasticsearch: true') - )); + describe('core', () => { + describe('request context', () => { + it('provides access to elasticsearch', async () => ( + await supertest + .get('/requestcontext/elasticsearch') + .expect(200, 'Elasticsearch: true') + )); - it('provides access to SavedObjects client', async () => ( - await supertest - .get('/requestcontext/savedobjectsclient') - .expect(200, 'SavedObjects client: {"page":1,"per_page":20,"total":0,"saved_objects":[]}') - )); + it('provides access to SavedObjects client', async () => ( + await supertest + .get('/requestcontext/savedobjectsclient') + .expect(200, 'SavedObjects client: {"page":1,"per_page":20,"total":0,"saved_objects":[]}') + )); + }); + + describe('compression', () => { + it(`uses compression when there isn't a referer`, async () => { + await supertest + .get('/app/kibana') + .set('accept-encoding', 'gzip') + .then(response => { + expect(response.headers).to.have.property('content-encoding', 'gzip'); + }); + }); + + it(`doesn't use compression when there is a referer`, async () => { + await supertest + .get('/app/kibana') + .set('accept-encoding', 'gzip') + .set('referer', 'https://www.google.com') + .then(response => { + expect(response.headers).not.to.have.property('content-encoding'); + }); + }); + }); }); } diff --git a/test/api_integration/apis/index.js b/test/api_integration/apis/index.js index 9f2672959390c..de36ee678b10e 100644 --- a/test/api_integration/apis/index.js +++ b/test/api_integration/apis/index.js @@ -34,5 +34,6 @@ export default function ({ loadTestFile }) { loadTestFile(require.resolve('./status')); loadTestFile(require.resolve('./stats')); loadTestFile(require.resolve('./ui_metric')); + loadTestFile(require.resolve('./core')); }); } diff --git a/test/api_integration/apis/saved_objects/bulk_update.js b/test/api_integration/apis/saved_objects/bulk_update.js new file mode 100644 index 0000000000000..4bdf257ceef02 --- /dev/null +++ b/test/api_integration/apis/saved_objects/bulk_update.js @@ -0,0 +1,274 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import expect from '@kbn/expect'; +import _ from 'lodash'; + +export default function ({ getService }) { + const supertest = getService('supertest'); + const es = getService('es'); + const esArchiver = getService('esArchiver'); + + + describe('bulkUpdate', () => { + describe('with kibana index', () => { + before(() => esArchiver.load('saved_objects/basic')); + after(() => esArchiver.unload('saved_objects/basic')); + it('should return 200', async () => { + const response = await supertest + .put(`/api/saved_objects/_bulk_update`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing visualization' + } + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard' + } + }, + ]) + .expect(200); + + const { saved_objects: [ firstObject, secondObject ] } = response.body; + + // loose ISO8601 UTC time with milliseconds validation + expect(firstObject).to.have.property('updated_at').match(/^[\d-]{10}T[\d:\.]{12}Z$/); + expect(_.omit(firstObject, ['updated_at'])).to.eql({ + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + type: 'visualization', + version: 'WzgsMV0=', + attributes: { + title: 'An existing visualization', + }, + }); + + expect(secondObject).to.have.property('updated_at').match(/^[\d-]{10}T[\d:\.]{12}Z$/); + expect(_.omit(secondObject, ['updated_at'])).to.eql({ + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + type: 'dashboard', + version: 'WzksMV0=', + attributes: { + title: 'An existing dashboard', + }, + }); + }); + + it('does not pass references if omitted', async () => { + const { body: { saved_objects: [ visObject, dashObject ] } } = await supertest + .post(`/api/saved_objects/_bulk_get`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + } + ]); + + const response = await supertest + .put(`/api/saved_objects/_bulk_update`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'Changed title but nothing else' + }, + version: visObject.version + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'Changed title and references' + }, + version: dashObject.version, + references: [{ id: 'foo', name: 'Foo', type: 'visualization' }] + }, + ]) + .expect(200); + + const { saved_objects: [ firstUpdatedObject, secondUpdatedObject ] } = response.body; + expect(firstUpdatedObject).to.not.have.property('error'); + expect(secondUpdatedObject).to.not.have.property('error'); + + const { body: { saved_objects: [ visObjectAfterUpdate, dashObjectAfterUpdate ] } } = await supertest + .post(`/api/saved_objects/_bulk_get`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + } + ]); + + expect(visObjectAfterUpdate.references).to.eql(visObject.references); + expect(dashObjectAfterUpdate.references).to.eql([{ id: 'foo', name: 'Foo', type: 'visualization' }]); + }); + + it('passes empty references array if empty references array is provided', async () => { + const { body: { saved_objects: [ { version } ] } } = await supertest + .post(`/api/saved_objects/_bulk_get`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + } + ]); + + await supertest + .put(`/api/saved_objects/_bulk_update`) + .send([ + { + type: 
'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'Changed title but nothing else' + }, + version, + references: [] + } + ]) + .expect(200); + + const { body: { saved_objects: [ visObjectAfterUpdate ] } } = await supertest + .post(`/api/saved_objects/_bulk_get`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + } + ]); + + expect(visObjectAfterUpdate.references).to.eql([]); + }); + + describe('unknown id', () => { + it('should return a generic 404', async () => { + const response = await supertest + .put(`/api/saved_objects/_bulk_update`) + .send([ + { + type: 'visualization', + id: 'not an id', + attributes: { + title: 'An existing visualization' + } + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard' + } + }, + ]) + .expect(200); + + const { saved_objects: [ missingObject, updatedObject ] } = response.body; + + // loose ISO8601 UTC time with milliseconds validation + expect(missingObject).eql({ + type: 'visualization', + id: 'not an id', + error: { + statusCode: 404, + error: 'Not Found', + message: 'Saved object [visualization/not an id] not found' + } + }); + + expect(updatedObject).to.have.property('updated_at').match(/^[\d-]{10}T[\d:\.]{12}Z$/); + expect(_.omit(updatedObject, ['updated_at', 'version'])).to.eql({ + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + type: 'dashboard', + attributes: { + title: 'An existing dashboard', + }, + }); + }); + }); + }); + + describe('without kibana index', () => { + before(async () => ( + // just in case the kibana server has recreated it + await es.indices.delete({ + index: '.kibana', + ignore: [404], + }) + )); + + it('should return generic 404', async () => { + const response = await supertest + .put(`/api/saved_objects/_bulk_update`) + .send([ + { + type: 'visualization', + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing visualization' + } + }, + { + type: 'dashboard', + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + attributes: { + title: 'An existing dashboard' + } + }, + ]) + .expect(200); + + const { saved_objects: [ firstObject, secondObject ] } = response.body; + + expect(firstObject).to.eql({ + id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', + type: 'visualization', + error: { + statusCode: 404, + error: 'Not Found', + message: 'Saved object [visualization/dd7caf20-9efd-11e7-acb3-3dab96693fab] not found' + }, + }); + + expect(secondObject).to.eql({ + id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', + type: 'dashboard', + error: { + statusCode: 404, + error: 'Not Found', + message: 'Saved object [dashboard/be3733a0-9efe-11e7-acb3-3dab96693fab] not found' + }, + }); + }); + }); + }); +} diff --git a/test/api_integration/apis/saved_objects/index.js b/test/api_integration/apis/saved_objects/index.js index 8259941f6d58c..ad6c3749181dd 100644 --- a/test/api_integration/apis/saved_objects/index.js +++ b/test/api_integration/apis/saved_objects/index.js @@ -29,6 +29,7 @@ export default function ({ loadTestFile }) { loadTestFile(require.resolve('./import')); loadTestFile(require.resolve('./resolve_import_errors')); loadTestFile(require.resolve('./update')); + loadTestFile(require.resolve('./bulk_update')); loadTestFile(require.resolve('./migrations')); }); } diff --git a/test/functional/apps/dashboard/time_zones.js b/test/functional/apps/dashboard/time_zones.js index eca536a1389d5..533194ba03e6f 100644 --- a/test/functional/apps/dashboard/time_zones.js +++ 
b/test/functional/apps/dashboard/time_zones.js @@ -22,6 +22,8 @@ import expect from '@kbn/expect'; export default function ({ getService, getPageObjects }) { const pieChart = getService('pieChart'); + const browser = getService('browser'); + const kibanaServer = getService('kibanaServer'); const PageObjects = getPageObjects(['dashboard', 'timePicker', 'settings', 'common']); describe('dashboard time zones', function () { @@ -30,15 +32,14 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.navigateTo(); await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', 'timezonetest_6_2_4.json')); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.common.navigateToApp('dashboard'); await PageObjects.dashboard.loadSavedDashboard('time zone test'); }); after(async () => { - await PageObjects.settings.navigateTo(); - await PageObjects.settings.clickKibanaSettings(); - await PageObjects.settings.setAdvancedSettingsSelect('dateFormat:tz', 'UTC'); - await PageObjects.common.navigateToApp('dashboard'); + await kibanaServer.uiSettings.replace({ 'dateFormat:tz': 'UTC' }); + await browser.refresh(); }); it('Exported dashboard adjusts EST time to UTC', async () => { diff --git a/test/functional/apps/management/_import_objects.js b/test/functional/apps/management/_import_objects.js index 58ced7d9ab5ad..bf55532dccad4 100644 --- a/test/functional/apps/management/_import_objects.js +++ b/test/functional/apps/management/_import_objects.js @@ -42,7 +42,7 @@ export default function ({ getService, getPageObjects }) { it('should import saved objects', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); const objects = await PageObjects.settings.getSavedObjectsInTable(); @@ -53,7 +53,7 @@ export default function ({ getService, getPageObjects }) { it('should provide dialog to allow the importing of saved objects with index pattern conflicts', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_conflicts.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('d1e4c910-a2e6-11e7-bb30-233be9be6a15', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -71,7 +71,7 @@ export default function ({ getService, getPageObjects }) { // so that we can override the existing visualization. await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_exists.ndjson'), false); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('logstash-*', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); @@ -89,7 +89,7 @@ export default function ({ getService, getPageObjects }) { // so that we can be prompted to override the existing visualization. 
await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_exists.ndjson'), false); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('logstash-*', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); @@ -103,13 +103,13 @@ export default function ({ getService, getPageObjects }) { it('should import saved objects linked to saved searches', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_saved_search.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.navigateTo(); await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_connected_to_saved_search.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -122,7 +122,7 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.navigateTo(); await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_connected_to_saved_search.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkNoneImported(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -140,7 +140,7 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_with_saved_search.ndjson')); // Wait for all the saves to happen - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.clickConfirmChanges(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -154,7 +154,7 @@ export default function ({ getService, getPageObjects }) { // First, import the objects await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_with_index_patterns.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); // Wait for all the saves to happen await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -173,7 +173,7 @@ export default function ({ getService, getPageObjects }) { // Then, import the objects await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_with_index_patterns.ndjson')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); // Wait for all the saves to happen await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -199,7 +199,7 @@ export default function ({ getService, getPageObjects }) { it('should import 
saved objects', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); const objects = await PageObjects.settings.getSavedObjectsInTable(); @@ -210,7 +210,8 @@ export default function ({ getService, getPageObjects }) { it('should provide dialog to allow the importing of saved objects with index pattern conflicts', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects-conflicts.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportLegacyWarning(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('d1e4c910-a2e6-11e7-bb30-233be9be6a15', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -228,7 +229,8 @@ export default function ({ getService, getPageObjects }) { // so that we can override the existing visualization. await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_exists.json'), false); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportLegacyWarning(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('logstash-*', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); @@ -246,7 +248,8 @@ export default function ({ getService, getPageObjects }) { // so that we can be prompted to override the existing visualization. 
await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_exists.json'), false); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportLegacyWarning(); + await PageObjects.settings.checkImportConflictsWarning(); await PageObjects.settings.associateIndexPattern('logstash-*', 'logstash-*'); await PageObjects.settings.clickConfirmChanges(); @@ -260,13 +263,13 @@ export default function ({ getService, getPageObjects }) { it('should import saved objects linked to saved searches', async function () { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_saved_search.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.navigateTo(); await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_connected_to_saved_search.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -279,7 +282,7 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.navigateTo(); await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_connected_to_saved_search.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportFailedWarning(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -293,7 +296,7 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_saved_search.json')); // Wait for all the saves to happen - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); // Second, we need to delete the index pattern @@ -307,7 +310,7 @@ export default function ({ getService, getPageObjects }) { await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_connected_to_saved_search.json')); // Wait for all the saves to happen - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkNoneImported(); await PageObjects.settings.clickImportDone(); await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -320,7 +323,7 @@ export default function ({ getService, getPageObjects }) { // First, import the objects await PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_with_index_patterns.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportFailedWarning(); await PageObjects.settings.clickImportDone(); // Wait for all the saves to happen await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); @@ -339,7 +342,7 @@ export default function ({ getService, getPageObjects }) { // Then, import the objects await 
PageObjects.settings.clickKibanaSavedObjects(); await PageObjects.settings.importFile(path.join(__dirname, 'exports', '_import_objects_with_index_patterns.json')); - await PageObjects.header.waitUntilLoadingHasFinished(); + await PageObjects.settings.checkImportSucceeded(); await PageObjects.settings.clickImportDone(); // Wait for all the saves to happen await PageObjects.settings.waitUntilSavedObjectsTableIsNotLoading(); diff --git a/test/functional/apps/management/_kibana_settings.js b/test/functional/apps/management/_kibana_settings.js index 41cb6e5828459..9fb302cdba00a 100644 --- a/test/functional/apps/management/_kibana_settings.js +++ b/test/functional/apps/management/_kibana_settings.js @@ -98,8 +98,8 @@ export default function ({ getService, getPageObjects }) { }); after(async function () { - await PageObjects.settings.clickKibanaSettings(); - await PageObjects.settings.setAdvancedSettingsSelect('dateFormat:tz', 'UTC'); + await kibanaServer.uiSettings.replace({ 'dateFormat:tz': 'UTC' }); + await browser.refresh(); }); }); } diff --git a/test/functional/apps/management/index.js b/test/functional/apps/management/index.js index 4d4031b4e489b..e68920f06372a 100644 --- a/test/functional/apps/management/index.js +++ b/test/functional/apps/management/index.js @@ -42,6 +42,8 @@ export default function ({ getService, loadTestFile }) { loadTestFile(require.resolve('./_kibana_settings')); loadTestFile(require.resolve('./_scripted_fields')); loadTestFile(require.resolve('./_scripted_fields_preview')); + loadTestFile(require.resolve('./_mgmt_import_saved_objects')); + }); describe('', function () { diff --git a/test/functional/page_objects/settings_page.js b/test/functional/page_objects/settings_page.js index cd3741e5f5bcc..60f3d3a7e9eab 100644 --- a/test/functional/page_objects/settings_page.js +++ b/test/functional/page_objects/settings_page.js @@ -239,8 +239,8 @@ export function SettingsPageProvider({ getService, getPageObjects }) { async setScriptedFieldLanguageFilter(language) { await find.clickByCssSelector( 'select[data-test-subj="scriptedFieldLanguageFilterDropdown"] > option[label="' + - language + - '"]' + language + + '"]' ); } @@ -287,9 +287,14 @@ export function SettingsPageProvider({ getService, getPageObjects }) { await indexLink.click(); } + async getIndexPatternList() { + await testSubjects.existOrFail('indexPatternTable', { timeout: 5000 }); + return await find.allByCssSelector('[data-test-subj="indexPatternTable"] .euiTable a'); + } + async isIndexPatternListEmpty() { await testSubjects.existOrFail('indexPatternTable', { timeout: 5000 }); - const indexPatternList = await find.allByCssSelector('[data-test-subj="indexPatternTable"] .euiTable a'); + const indexPatternList = await this.getIndexPatternList(); return indexPatternList.length === 0; } @@ -300,13 +305,16 @@ export function SettingsPageProvider({ getService, getPageObjects }) { } } - async createIndexPattern(indexPatternName, timefield = '@timestamp') { + async createIndexPattern(indexPatternName, timefield = '@timestamp', isStandardIndexPattern = true) { await retry.try(async () => { await this.navigateTo(); await PageObjects.header.waitUntilLoadingHasFinished(); await this.clickKibanaIndexPatterns(); await PageObjects.header.waitUntilLoadingHasFinished(); await this.clickOptionalAddNewButton(); + if (!isStandardIndexPattern) { + await this.clickCreateNewRollupButton(); + } await PageObjects.header.waitUntilLoadingHasFinished(); await retry.try(async () => { await this.setIndexPatternField({ indexPatternName }); @@ 
-340,6 +348,10 @@ export function SettingsPageProvider({ getService, getPageObjects }) { } } + async clickCreateNewRollupButton() { + await testSubjects.click('createRollupIndexPatternButton'); + } + async getIndexPatternIdFromUrl() { const currentUrl = await browser.getCurrentUrl(); const indexPatternId = currentUrl.match(/.*\/(.*)/)[1]; @@ -595,6 +607,26 @@ export function SettingsPageProvider({ getService, getPageObjects }) { await PageObjects.header.waitUntilLoadingHasFinished(); } + async checkImportSucceeded() { + await testSubjects.existOrFail('importSavedObjectsSuccess', { timeout: 20000 }); + } + + async checkNoneImported() { + await testSubjects.existOrFail('importSavedObjectsSuccessNoneImported', { timeout: 20000 }); + } + + async checkImportConflictsWarning() { + await testSubjects.existOrFail('importSavedObjectsConflictsWarning', { timeout: 20000 }); + } + + async checkImportLegacyWarning() { + await testSubjects.existOrFail('importSavedObjectsLegacyWarning', { timeout: 20000 }); + } + + async checkImportFailedWarning() { + await testSubjects.existOrFail('importSavedObjectsFailedWarning', { timeout: 20000 }); + } + async clickImportDone() { await testSubjects.click('importSavedObjectsDoneBtn'); } diff --git a/x-pack/.i18nrc.json b/x-pack/.i18nrc.json index e0ba455552966..735ee0b6b67b5 100644 --- a/x-pack/.i18nrc.json +++ b/x-pack/.i18nrc.json @@ -27,7 +27,7 @@ "xpack.main": "legacy/plugins/xpack_main", "xpack.monitoring": "legacy/plugins/monitoring", "xpack.remoteClusters": "legacy/plugins/remote_clusters", - "xpack.reporting": "legacy/plugins/reporting", + "xpack.reporting": [ "plugins/reporting", "legacy/plugins/reporting" ], "xpack.rollupJobs": "legacy/plugins/rollup", "xpack.searchProfiler": "legacy/plugins/searchprofiler", "xpack.siem": "legacy/plugins/siem", diff --git a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx index 39912ec2ca8b4..477559784bf59 100644 --- a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx +++ b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx @@ -24,7 +24,7 @@ import { MatchedRouteProvider } from '../context/MatchedRouteContext'; export const REACT_APP_ROOT_ID = 'react-apm-root'; -const MainContainer = styled.div` +const MainContainer = styled.main` min-width: ${px(unit * 50)}; padding: ${px(units.plus)}; `; diff --git a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/index.ts b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/index.ts index 894df9dc1c6b9..097aef69d4b4c 100644 --- a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/index.ts +++ b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/index.ts @@ -47,10 +47,9 @@ import { rounddate } from './rounddate'; import { rowCount } from './rowCount'; import { repeatImage } from './repeatImage'; import { revealImage } from './revealImage'; -// TODO: elastic/kibana#44822 Disabling pending filters work -// import { savedMap } from './saved_map'; -// import { savedSearch } from './saved_search'; -// import { savedVisualization } from './saved_visualization'; +import { savedMap } from './saved_map'; +import { savedSearch } from './saved_search'; +import { savedVisualization } from './saved_visualization'; import { seriesStyle } from './seriesStyle'; import { shape } from './shape'; import { sort } from './sort'; @@ -107,10 +106,9 @@ export const functions = [ revealImage, rounddate, rowCount, - // TODO: elastic/kibana#44822 Disabling pending filters work - // 
savedMap, - // savedSearch, - // savedVisualization, + savedMap, + savedSearch, + savedVisualization, seriesStyle, shape, sort, diff --git a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_map.ts b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_map.ts index 7541924008845..958d9c6a3a6f0 100644 --- a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_map.ts +++ b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_map.ts @@ -35,7 +35,6 @@ interface SavedMapInput extends EmbeddableInput { type Return = EmbeddableExpression; export function savedMap(): ExpressionFunction<'savedMap', Filter | null, Arguments, Return> { - // @ts-ignore elastic/kibana#44822 Disabling pending filters work const { help, args: argHelp } = getFunctionHelp().savedMap; return { name: 'savedMap', diff --git a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_search.ts b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_search.ts index 3159daee75bc6..4c8170bcb08bd 100644 --- a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_search.ts +++ b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_search.ts @@ -23,7 +23,6 @@ interface Arguments { type Return = EmbeddableExpression & { id: SearchInput['id'] }>; export function savedSearch(): ExpressionFunction<'savedSearch', Filter | null, Arguments, Return> { - // @ts-ignore elastic/kibana#44822 Disabling pending filters work const { help, args: argHelp } = getFunctionHelp().savedSearch; return { name: 'savedSearch', diff --git a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_visualization.ts b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_visualization.ts index cd01acd4387be..6b7d06454f44c 100644 --- a/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_visualization.ts +++ b/x-pack/legacy/plugins/canvas/canvas_plugin_src/functions/common/saved_visualization.ts @@ -26,7 +26,6 @@ export function savedVisualization(): ExpressionFunction< Arguments, Return > { - // @ts-ignore elastic/kibana#44822 Disabling pending filters work const { help, args: argHelp } = getFunctionHelp().savedVisualization; return { name: 'savedVisualization', diff --git a/x-pack/legacy/plugins/canvas/i18n/functions/function_help.ts b/x-pack/legacy/plugins/canvas/i18n/functions/function_help.ts index de0d62b3d4453..495415851e118 100644 --- a/x-pack/legacy/plugins/canvas/i18n/functions/function_help.ts +++ b/x-pack/legacy/plugins/canvas/i18n/functions/function_help.ts @@ -213,12 +213,8 @@ export const getFunctionHelp = (): FunctionHelpDict => ({ revealImage, rounddate, rowCount, - // TODO: elastic/kibana#44822 Disabling pending filters work - // @ts-ignore savedMap, - // @ts-ignore savedSearch, - // @ts-ignore savedVisualization, seriesStyle, shape, diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_header.tsx b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_header.tsx index b05e1f5b757c2..31ad0593f58bb 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_header.tsx +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_header.tsx @@ -13,6 +13,7 @@ import { EuiFlexGroup, EuiButtonIcon, EuiButton, + EuiButtonEmpty, EuiOverlayMask, EuiModal, EuiModalFooter, @@ -193,14 +194,11 @@ export class WorkpadHeader extends React.PureComponent { - {/* - TODO: elastic/kibana#44822 Disabling 
pending filters work {strings.getEmbedObjectButtonLabel()} - */} return { codeUiEnabled: config.get('xpack.code.ui.enabled'), codeIntegrationsEnabled: config.get('xpack.code.integrations.enabled'), + codeDiffPageEnabled: config.get('xpack.code.diffPage.enabled'), }; }, hacks: ['plugins/code/hacks/toggle_app_link_in_nav'], @@ -61,6 +62,9 @@ export const code = (kibana: any) => integrations: Joi.object({ enabled: Joi.boolean().default(false), }).default(), + diffPage: Joi.object({ + enabled: Joi.boolean().default(false), + }).default(), enabled: Joi.boolean().default(true), }) .default() @@ -78,7 +82,7 @@ export const code = (kibana: any) => // Set up with the new platform plugin lifecycle API. const plugin = codePlugin(initializerContext); - plugin.setup(coreSetup); + await plugin.setup(coreSetup, initializerContext.legacy.http); // @ts-ignore const kbnServer = this.kbnServer; diff --git a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx index 68c96b904e98a..ca97c7a091c22 100644 --- a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx +++ b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx @@ -131,7 +131,7 @@ class AdminPage extends React.PureComponent { public render() { return ( -
+
{
- + ); } } diff --git a/x-pack/legacy/plugins/code/public/components/commits/commit.tsx b/x-pack/legacy/plugins/code/public/components/commits/commit.tsx index 31020f6598393..9326fdffe7ef8 100644 --- a/x-pack/legacy/plugins/code/public/components/commits/commit.tsx +++ b/x-pack/legacy/plugins/code/public/components/commits/commit.tsx @@ -16,12 +16,13 @@ import { EuiCopy, EuiTitle, } from '@elastic/eui'; +import chrome from 'ui/chrome'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; import { CommitInfo } from '../../../model/commit'; -import { PathTypes } from '../../common/types'; import { RepositoryUtils } from '../../../common/repository_utils'; import { parseCommitMessage } from '../../../common/commit_utils'; +import { PathTypes } from '../../common/types'; const COMMIT_ID_LENGTH = 8; @@ -54,8 +55,17 @@ const revisionLinkLabel = i18n.translate('xpack.code.commits.revisionLinkAriaLab defaultMessage: 'View the project at this commit', }); +const getRevisionPath = (repoUri: string, commitId: string) => { + const diffPageEnabled = chrome.getInjected('codeDiffPageEnabled'); + if (diffPageEnabled) { + return `#/${repoUri}/commit/${commitId}`; + } else { + return `#/${repoUri}/${PathTypes.tree}/${commitId}`; + } +}; + const CommitActions = ({ commitId, repoUri }: ActionProps) => { - const revisionPath = `#/${repoUri}/${PathTypes.tree}/${commitId}`; + const revisionPath = getRevisionPath(repoUri, commitId); return (
diff --git a/x-pack/legacy/plugins/code/public/components/commits/commit_link.tsx b/x-pack/legacy/plugins/code/public/components/commits/commit_link.tsx deleted file mode 100644 index c2e79a5fd7490..0000000000000 --- a/x-pack/legacy/plugins/code/public/components/commits/commit_link.tsx +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -import { EuiBadge /* , EuiLink*/ } from '@elastic/eui'; -import React from 'react'; -// import { DIFF } from '../routes'; - -interface Props { - repoUri: string; - commit: string; - children?: any; -} - -export const CommitLink = ({ repoUri, commit, children }: Props) => { - // const href = DIFF.replace(':resource/:org/:repo', repoUri).replace(':commitId', commit); - return ( - // - {children || commit} - // - ); -}; diff --git a/x-pack/legacy/plugins/code/public/components/commits/index.ts b/x-pack/legacy/plugins/code/public/components/commits/index.ts index bd751bc90dd3b..868597cc5e9c4 100644 --- a/x-pack/legacy/plugins/code/public/components/commits/index.ts +++ b/x-pack/legacy/plugins/code/public/components/commits/index.ts @@ -5,6 +5,5 @@ */ export { Commit } from './commit'; -export { CommitLink } from './commit_link'; export { CommitGroup } from './group'; export { CommitHistory, CommitHistoryComponent } from './history'; diff --git a/x-pack/legacy/plugins/code/public/components/diff_page/accordion.tsx b/x-pack/legacy/plugins/code/public/components/diff_page/accordion.tsx new file mode 100644 index 0000000000000..2d5bf8e63a437 --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/diff_page/accordion.tsx @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React, { useState } from 'react'; +import { EuiIcon, EuiFlexGroup, EuiFlexItem, EuiPanel } from '@elastic/eui'; + +interface Props { + initialIsOpen: boolean; + title: React.ReactNode; + children: React.ReactNode; + className: string; +} + +export const Accordion = (props: Props) => { + const [isOpen, setOpen] = useState(props.initialIsOpen); + return ( + + { + setOpen(!isOpen); + }} + > + {props.title} + + + + + + + ); +}; diff --git a/x-pack/legacy/plugins/code/public/components/diff_page/diff.scss b/x-pack/legacy/plugins/code/public/components/diff_page/diff.scss index c402660599f57..c10feae4f9dab 100644 --- a/x-pack/legacy/plugins/code/public/components/diff_page/diff.scss +++ b/x-pack/legacy/plugins/code/public/components/diff_page/diff.scss @@ -1,16 +1,48 @@ +.codeDiffCommitSelectorsContainer { + border-left: $euiBorderThin solid $euiColorLightShade; + padding-left: $euiSize; +} + +.codeDiffCommitMessage { + padding: $euiSizeS $euiSize; +} + +.codeDiffChangedFiles { + color: $euiColorPrimary; +} + +.codeDiffMetadata { + padding: 0 $euiSize $euiSizeS $euiSize; +} +.codeVisualizerIcon { + margin-right: $euiSizeS; +} + +.codeDiffDeletion { + margin: 0 $euiSizeS; + background-color: $euiColorVis0; +} -.diff > button.euiAccordion__button > div:first-child { - flex-direction: row-reverse; - padding: $euiSize $euiSizeS; +.codeDiff__panel { + padding: 0 $euiSize; } -.diff > button.euiAccordion__button { - &:hover { - text-decoration: none; - } +.codeDiff__header { + border-bottom: $euiBorderThin solid $euiColorLightShade; + height: $euiSizeXXL; + padding: $euiSize; } -.euiAccordion__iconWrapper { +.codeAccordionCollapse__icon { + margin: auto $euiSize auto 0; cursor: pointer; } + +.codeViewFile__button { + height: $euiSizeL !important; +} + +.codeAccordion { + margin-bottom: $euiSizeM; +} diff --git a/x-pack/legacy/plugins/code/public/components/diff_page/diff.tsx b/x-pack/legacy/plugins/code/public/components/diff_page/diff.tsx index 5aa840fdbc782..98a694c2542d1 100644 --- a/x-pack/legacy/plugins/code/public/components/diff_page/diff.tsx +++ b/x-pack/legacy/plugins/code/public/components/diff_page/diff.tsx @@ -4,99 +4,33 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { EuiAccordion, EuiFlexGroup, EuiFlexItem, EuiIcon, EuiText, EuiTitle } from '@elastic/eui'; -import theme from '@elastic/eui/dist/eui_theme_light.json'; -import React, { MouseEvent } from 'react'; +import { + EuiFlexGroup, + EuiFlexItem, + EuiIcon, + EuiText, + EuiBadge, + EuiNotificationBadge, +} from '@elastic/eui'; +import React from 'react'; import { connect } from 'react-redux'; import { Link, RouteComponentProps, withRouter } from 'react-router-dom'; -import styled from 'styled-components'; import { CommitDiff, FileDiff } from '../../../common/git_diff'; -import { SearchScope } from '../../../model'; +import { SearchScope, SearchOptions } from '../../../model'; import { changeSearchScope } from '../../actions'; import { RootState } from '../../reducers'; -// import { SearchBar } from '../search_bar'; +import { SearchBar } from '../search_bar'; import { DiffEditor } from './diff_editor'; +import { Accordion } from './accordion'; const COMMIT_ID_LENGTH = 16; -const B = styled.b` - font-weight: bold; -`; - -const PrimaryB = styled(B)` - color: ${theme.euiColorPrimary}; -`; - -const CommitId = styled.span` - display: inline-block; - padding: 0 ${theme.paddingSizes.xs}; - border: ${theme.euiBorderThin}; -`; - -const Addition = styled.div` - padding: ${theme.paddingSizes.xs} ${theme.paddingSizes.s}; - border-radius: ${theme.euiSizeXS}; - color: white; - margin-right: ${theme.euiSizeS}; - background-color: ${theme.euiColorDanger}; -`; - -const Deletion = styled(Addition)` - background-color: ${theme.euiColorVis0}; -`; - -const Container = styled.div` - padding: ${theme.paddingSizes.xs} ${theme.paddingSizes.m}; -`; - -const TopBarContainer = styled.div` - height: calc(48rem / 14); - border-bottom: ${theme.euiBorderThin}; - padding: 0 ${theme.paddingSizes.m}; - display: flex; - flex-direction: row; - justify-content: space-between; -`; - -// @types/styled-components@3.0.1 does not yet support `defaultProps`, which EuiAccordion uses -// Ref: https://github.com/DefinitelyTyped/DefinitelyTyped/pull/31903 -// const Accordion = styled(EuiAccordion)` -// border: ${theme.euiBorderThick}; -// border-radius: ${theme.euiSizeS}; -// margin-bottom: ${theme.euiSize}; -// `; -const accordionStyles = { - border: theme.euiBorderThick, - borderRadius: theme.euiSizeS, - marginBottom: theme.euiSize, -}; - -const Icon = styled(EuiIcon)` - margin-right: ${theme.euiSizeS}; -`; - -const Parents = styled.div` - border-left: ${theme.euiBorderThin}; - height: calc(32rem / 14); - line-height: calc(32rem / 14); - padding-left: ${theme.paddingSizes.s}; - margin: ${theme.euiSizeS} 0; -`; - -const H4 = styled.h4` - height: 100%; - line-height: calc(48rem / 14); -`; - -const ButtonContainer = styled.div` - cursor: default; -`; - interface Props extends RouteComponentProps<{ resource: string; org: string; repo: string }> { commit: CommitDiff | null; query: string; onSearchScopeChanged: (s: SearchScope) => void; repoScope: string[]; + searchOptions: SearchOptions; } export enum DiffLayout { @@ -104,37 +38,48 @@ export enum DiffLayout { Split, } -const onClick = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); -}; - -const Difference = (props: { fileDiff: FileDiff; repoUri: string; revision: string }) => ( - - - - - {props.fileDiff.additions} - {props.fileDiff.deletions} - - - {props.fileDiff.path} - -
- - - View File - - -
-
-
- +const Difference = (props: { + fileDiff: FileDiff; + repoUri: string; + revision: string; + initialIsOpen: boolean; +}) => ( + + + + + + {props.fileDiff.additions} + + {props.fileDiff.deletions} + + + + {props.fileDiff.path} + + + +
+ + + View File + + +
+
+ } > -
+ ); export class DiffPage extends React.Component { @@ -160,6 +105,7 @@ export class DiffPage extends React.Component { }; public render() { + const DEFAULT_OPEN_FILE_DIFF_COUNT = 1; const { commit, match } = this.props; const { repo, org, resource } = match.params; const repoUri = `${resource}/${org}/${repo}`; @@ -167,68 +113,46 @@ export class DiffPage extends React.Component { return null; } const { additions, deletions, files } = commit; - const { parents } = commit.commit; - const title = commit.commit.message.split('\n')[0]; - let parentsLinks = null; - if (parents.length > 1) { - const [p1, p2] = parents; - parentsLinks = ( - - {p1}+ - {p2} - - ); - } else if (parents.length === 1) { - parentsLinks = {parents[0]}; - } - const topBar = ( - -
- -

{title}

-
-
-
- Parents: {parentsLinks} -
-
- ); const fileCount = files.length; - const diffs = commit.files.map(file => ( - + const diffs = commit.files.map((file, index) => ( + )); return ( -
- {/* + */} - {topBar} - - {commit.commit.message} - - - - - - - Showing - {fileCount} Changed files - with - {additions} additions and {deletions} deletions - - - - - Committed by - {commit.commit.committer} - {commit.commit.id.substr(0, COMMIT_ID_LENGTH)} - - - - - {diffs} + searchOptions={this.props.searchOptions} + enableSubmitWhenOptionsChanged={false} + /> +
+ {commit.commit.message} +
+ + + + + Showing + {fileCount} Changed files + with + {additions} additions and {deletions} deletions + + + + + Committed by + {commit.commit.committer} + {commit.commit.id.substr(0, COMMIT_ID_LENGTH)} + + + +
{diffs}
); } @@ -238,6 +162,7 @@ const mapStateToProps = (state: RootState) => ({ commit: state.commit.commit, query: state.search.query, repoScope: state.search.searchOptions.repoScope.map(r => r.uri), + searchOptions: state.search.searchOptions, }); const mapDispatchToProps = { diff --git a/x-pack/legacy/plugins/code/public/components/diff_page/diff_editor.tsx b/x-pack/legacy/plugins/code/public/components/diff_page/diff_editor.tsx index 01b77d250b161..e02d54a729192 100644 --- a/x-pack/legacy/plugins/code/public/components/diff_page/diff_editor.tsx +++ b/x-pack/legacy/plugins/code/public/components/diff_page/diff_editor.tsx @@ -16,6 +16,17 @@ interface Props { } export class DiffEditor extends React.Component { + lineHeight = 18; + static linesCount(s: string = '') { + let count = 0; + let position = 0; + while (position !== -1) { + count++; + position = position + 1; + position = s.indexOf('\n', position); + } + return count; + } private diffEditor: MonacoDiffEditor | null = null; public mountDiffEditor = (container: HTMLDivElement) => { this.diffEditor = new MonacoDiffEditor( @@ -28,6 +39,12 @@ export class DiffEditor extends React.Component { this.diffEditor.init(); }; + getEditorHeight = () => { + const originalLinesCount = DiffEditor.linesCount(this.props.originCode); + const modifiedLinesCount = DiffEditor.linesCount(this.props.modifiedCode); + return Math.min(Math.max(originalLinesCount, modifiedLinesCount) * this.lineHeight, 400); + }; + public componentDidUpdate(prevProps: Props) { if (prevProps.renderSideBySide !== this.props.renderSideBySide) { this.updateLayout(this.props.renderSideBySide); @@ -39,6 +56,13 @@ export class DiffEditor extends React.Component { } public render() { - return
; + return ( +
+ ); } } diff --git a/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.scss b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.scss new file mode 100644 index 0000000000000..90f28229480a3 --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.scss @@ -0,0 +1,29 @@ +.codeFlyout__subHeader { + display: flex; + flex-direction: column; + justify-content: center; +} + +.codeFlyout__fileHeader { + position: absolute; + display: flex; + align-items: center; + z-index: 1; +} + +.codeFlyout__fileLink { + font-weight: $euiFontWeightBold; +} + +.codeFlyout__icon { + margin: $euiSizeS; +} + +.codeFlyout__tabs { + justify-content: flex-end; +} + +.codeFlyout__content { + display: flex; + height: 100%; +} diff --git a/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.tsx b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.tsx new file mode 100644 index 0000000000000..ed023aa8a52c6 --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout.tsx @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import React from 'react'; +import { EuiFlyout } from '@elastic/eui'; +import { CodeFlyoutMain } from './code_flyout_main'; + +export const CodeFlyout = (props: { + repo: string; + file: string; + revision: string; + open: boolean; + onClose: () => void; +}) => { + if (props.open) { + return ( + + + + ); + } else { + return null; + } +}; diff --git a/x-pack/legacy/plugins/code/public/components/integrations/code_flyout_main.tsx b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout_main.tsx new file mode 100644 index 0000000000000..7468fc15bafc2 --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/integrations/code_flyout_main.tsx @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { Fragment, ReactNode } from 'react'; +import { + EuiTitle, + EuiFlyoutHeader, + EuiText, + EuiTabs, + EuiTab, + EuiIcon, + EuiLink, +} from '@elastic/eui'; +import { CodeViewer } from './code_viewer'; +import { History } from './history'; +import { absoluteCodeFileURI } from './helpers'; + +enum Tab { + code, + history, + blame, +} + +export const CodeFlyoutMain = (props: { repo: string; file: string; revision: string }) => { + const [selectedTab, setSelectedTab] = React.useState(Tab.code); + + let content: ReactNode; + + switch (selectedTab) { + case Tab.blame: + content = ( + + ); + break; + case Tab.history: + content = ; + break; + case Tab.code: + content = ( + + ); + break; + } + + return ( + + + +

File Preview

+
+
+
+
+ + + + {props.file} + + +
+ + setSelectedTab(Tab.code)} isSelected={selectedTab === Tab.code}> + Code + + setSelectedTab(Tab.history)} + isSelected={selectedTab === Tab.history} + > + History + + setSelectedTab(Tab.blame)} isSelected={selectedTab === Tab.blame}> + Blame + + +
+
{content}
+
+ ); +}; diff --git a/x-pack/legacy/plugins/code/public/components/integrations/code_viewer.tsx b/x-pack/legacy/plugins/code/public/components/integrations/code_viewer.tsx new file mode 100644 index 0000000000000..ae14ece21b245 --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/integrations/code_viewer.tsx @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { Component } from 'react'; +import { editor } from 'monaco-editor'; +import { ResizeChecker } from 'ui/resize_checker'; +import { + EuiFlexGroup, + EuiProgress, + EuiPage, + EuiPageBody, + EuiPageContent, + EuiPageContentHeader, + EuiPageContentHeaderSection, + EuiPageContentBody, +} from '@elastic/eui'; +import { monaco } from '../../monaco/monaco'; +import { requestFile } from '../../sagas/file'; +import { GitBlame } from '../../../common/git_blame'; +import { BlameWidget } from '../../monaco/blame/blame_widget'; +import { requestBlame } from '../../sagas/blame'; + +export interface Props { + repo: string; + file: string; + revision: string; + showBlame?: boolean; +} + +interface State { + loading: boolean; +} + +export class CodeViewer extends Component { + public readonly state: State = { + loading: true, + }; + public blameWidgets: any; + + private ed?: editor.IStandaloneCodeEditor; + private lineDecorations: string[] | null = null; + private resizeChecker?: ResizeChecker; + private viewerRef = React.createRef(); + + public componentDidMount(): void { + this.tryLoadFile(this.props); + } + + public componentWillUnmount(): void { + if (this.ed) { + this.ed.dispose(); + this.destroyBlameWidgets(); + } + } + + private async tryLoadFile({ file, revision, repo, showBlame }: Props) { + this.setState({ loading: true }); + const { content, lang } = await requestFile({ + path: file, + revision, + uri: repo, + }); + try { + await monaco.editor.colorize(content!, lang!, {}); + this.loadFile(content!, lang); + } catch (e) { + this.loadFile(content!); + } + if (showBlame) { + const blames: GitBlame[] = await requestBlame(repo, revision, file); + this.loadBlame(blames); + } + this.setState({ loading: false }); + } + + public loadBlame(blames: GitBlame[]) { + if (this.blameWidgets) { + this.destroyBlameWidgets(); + } + if (!this.lineDecorations) { + this.lineDecorations = this.ed!.deltaDecorations( + [], + [ + { + range: new monaco.Range(1, 1, Infinity, 1), + options: { isWholeLine: true, linesDecorationsClassName: 'code-line-decoration' }, + }, + ] + ); + } + this.blameWidgets = blames.map((b, index) => { + return new BlameWidget(b, index === 0, this.ed!); + }); + } + + public destroyBlameWidgets() { + if (this.blameWidgets) { + this.blameWidgets.forEach((bw: BlameWidget) => bw.destroy()); + } + if (this.lineDecorations) { + this.ed!.deltaDecorations(this.lineDecorations!, []); + this.lineDecorations = null; + } + this.blameWidgets = null; + if (this.resizeChecker) { + this.resizeChecker.destroy(); + } + } + + private loadFile(code: string, language: string = 'text') { + const container = this.viewerRef.current!; + this.ed = monaco.editor.create(container, { + value: code, + language, + readOnly: true, + minimap: { + enabled: false, + }, + hover: { + enabled: false, + }, + contextmenu: false, + selectOnLineNumbers: false, + selectionHighlight: false, + renderLineHighlight: 'none', + scrollBeyondLastLine: 
false, + renderIndentGuides: false, + automaticLayout: false, + lineDecorationsWidth: this.props.showBlame ? 316 : 16, + }); + this.resizeChecker = new ResizeChecker(container); + this.resizeChecker.on('resize', () => { + setTimeout(() => { + this.ed!.layout(); + }); + }); + } + + renderFileLoadingIndicator = () => { + const fileName = this.props.file; + return ( + + + + + +

+              {fileName} is loading...
+    );
+  };
+
+  render() {
+    return (
+
+        {this.state.loading && this.renderFileLoadingIndicator()}
+
+ + ); + } +} diff --git a/x-pack/legacy/plugins/code/public/components/integrations/helpers.ts b/x-pack/legacy/plugins/code/public/components/integrations/helpers.ts index 6b55a5e2a0f45..9913ab80a6f65 100644 --- a/x-pack/legacy/plugins/code/public/components/integrations/helpers.ts +++ b/x-pack/legacy/plugins/code/public/components/integrations/helpers.ts @@ -7,3 +7,9 @@ // TODO(rylnd): make this an actual external link export const externalFileURI: (repoUri: string, filePath: string) => string = (uri, path) => `/${uri}/blob/HEAD/${path}`; + +export const absoluteCodeFileURI: ( + repoUri: string, + filePath: string, + revision: string +) => string = (uri, path, revision = 'HEAD') => `/app/code#/${uri}/blob/${revision}/${path}`; diff --git a/x-pack/legacy/plugins/code/public/components/integrations/history.tsx b/x-pack/legacy/plugins/code/public/components/integrations/history.tsx new file mode 100644 index 0000000000000..c867bb31b657d --- /dev/null +++ b/x-pack/legacy/plugins/code/public/components/integrations/history.tsx @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import React from 'react'; +import { EuiTitle } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { CommitHistoryComponent } from '../commits'; +import { requestCommits } from '../../sagas/file'; +import { CommitInfo } from '../../../model/commit'; + +const PAGE_SIZE = 20; + +export const History = (props: { repo: string; file: string; revision: string }) => { + const [loading, setLoading] = React.useState(true); + const [commits, setCommits] = React.useState([]); + const [hasMore, setHasMore] = React.useState(false); + + const fetchCommits = async (loadMore: boolean) => { + setLoading(true); + const revision = loadMore ? commits[commits.length - 1].id : props.revision; + const newCommits = await requestCommits( + { uri: props.repo, revision }, + props.file, + loadMore, + PAGE_SIZE + ); + setLoading(false); + setHasMore(newCommits.length >= PAGE_SIZE); + setCommits(commits.concat(newCommits)); + return newCommits; + }; + + React.useEffect(() => { + fetchCommits(false).then(setCommits); + }, []); + + return ( + fetchCommits(true)} + loadingCommits={loading} + hasMoreCommit={hasMore} + header={ + +

+ +

+
+ } + showPagination={true} + repoUri={props.repo} + /> + ); +}; diff --git a/x-pack/legacy/plugins/code/public/components/integrations/index.tsx b/x-pack/legacy/plugins/code/public/components/integrations/index.tsx index e8a7ae25de3de..81e53398b851e 100644 --- a/x-pack/legacy/plugins/code/public/components/integrations/index.tsx +++ b/x-pack/legacy/plugins/code/public/components/integrations/index.tsx @@ -8,62 +8,89 @@ import React from 'react'; import { EuiFlexGroup, EuiPanel, EuiText } from '@elastic/eui'; import { CodeBlock } from '../code_block'; -import { history } from '../../utils/url'; import { FrameHeader } from './frame_header'; import { RepoTitle } from './repo_title'; import { CodeIntegrator } from './code_integrator'; -import { externalFileURI } from './helpers'; import { frames, Frame, repos, results } from './data'; +import { CodeFlyout } from './code_flyout'; const associateToService = (frame: Frame) => (repo: string) => alert(`repo ${repo} associated with service ${JSON.stringify(frame)}`); const handleImport = (repo: string) => alert(`import done: ${repo}`); -export const Integrations = () => ( -
- {frames.map(frame => { - const { fileName, lineNumber } = frame; - const key = `${fileName}#L${lineNumber}`; - const snippet = results[key]; +export const Integrations = () => { + const [isFlyoutVisible, setVisible] = React.useState(false); + const [flyoutFile, setflyoutFile] = React.useState({ + repo: 'github.com/Microsoft/TypeScript-Node-Starter', + file: 'src/app.ts', + revision: 'master', + }); - if (snippet) { - const { compositeContent, filePath, language, uri } = snippet; - const { content, lineMapping } = compositeContent; - const fileUrl = externalFileURI(uri, filePath); - const lines = content.split('\n'); + const closeFlyout = () => { + setVisible(false); + }; + + const showFlyout = (repo: string, file: string, revision: string = 'master') => { + setflyoutFile({ + repo, + file, + revision, + }); + setVisible(true); + }; + + return ( +
+ + {frames.map(frame => { + const { fileName, lineNumber } = frame; + const key = `${fileName}#L${lineNumber}`; + const snippet = results[key]; + + if (snippet) { + const { compositeContent, filePath, language, uri } = snippet; + const { content, lineMapping } = compositeContent; + const lines = content.split('\n'); + + return ( +
+ + + showFlyout(uri, filePath)} + /> + lineMapping[i]} /> + +
+ ); + } return (
- - - history.push(fileUrl)} + + + {fileName} + at + line {lineNumber} + + - lineMapping[i]} /> - +
); - } - - return ( -
- - - {fileName} - at - line {lineNumber} - - - -
- ); - })} -
-); + })} +
+ ); +}; diff --git a/x-pack/legacy/plugins/code/public/components/integrations/integrations.scss b/x-pack/legacy/plugins/code/public/components/integrations/integrations.scss index 98330c4060826..1def1c4c85b05 100644 --- a/x-pack/legacy/plugins/code/public/components/integrations/integrations.scss +++ b/x-pack/legacy/plugins/code/public/components/integrations/integrations.scss @@ -41,3 +41,6 @@ margin-bottom: 1rem; width: 300px; } + + +@import "./code_flyout.scss"; diff --git a/x-pack/legacy/plugins/code/public/monaco/monaco_diff_editor.ts b/x-pack/legacy/plugins/code/public/monaco/monaco_diff_editor.ts index 12cb75825cb64..bd0daa5e5684c 100644 --- a/x-pack/legacy/plugins/code/public/monaco/monaco_diff_editor.ts +++ b/x-pack/legacy/plugins/code/public/monaco/monaco_diff_editor.ts @@ -7,7 +7,7 @@ import { ResizeChecker } from '../components/shared/resize_checker'; import { monaco } from './monaco'; export class MonacoDiffEditor { - public diffEditor: monaco.editor.IDiffEditor | null = null; + public diffEditor: monaco.editor.IStandaloneDiffEditor | null = null; private resizeChecker: ResizeChecker | null = null; constructor( private readonly container: HTMLElement, @@ -26,6 +26,22 @@ export class MonacoDiffEditor { enableSplitViewResizing: false, renderSideBySide: this.renderSideBySide, scrollBeyondLastLine: false, + readOnly: true, + minimap: { + enabled: false, + }, + hover: { + enabled: false, // disable default hover; + }, + occurrencesHighlight: false, + selectionHighlight: false, + renderLineHighlight: 'none', + contextmenu: false, + folding: true, + renderIndentGuides: false, + automaticLayout: false, + lineDecorationsWidth: 16, + overviewRulerBorder: false, }); this.resizeChecker = new ResizeChecker(this.container); this.resizeChecker.on('resize', () => { @@ -38,6 +54,12 @@ export class MonacoDiffEditor { modified: modifiedModel, }); this.diffEditor = diffEditor; + const navi = monaco.editor.createDiffNavigator(diffEditor, { + followsCaret: true, + ignoreCharChanges: true, + }); + diffEditor.focus(); + navi.next(); }); } } diff --git a/x-pack/legacy/plugins/code/public/monaco/override_monaco_styles.scss b/x-pack/legacy/plugins/code/public/monaco/override_monaco_styles.scss index fbd9e8ad61dec..394f24bce37af 100644 --- a/x-pack/legacy/plugins/code/public/monaco/override_monaco_styles.scss +++ b/x-pack/legacy/plugins/code/public/monaco/override_monaco_styles.scss @@ -1,4 +1,5 @@ .codeContainer__monaco { + .monaco-diff-editor .cursors-layer > .cursor, .monaco-editor .cursors-layer > .cursor { display: none !important; } @@ -7,6 +8,7 @@ display: none !important; } + .monaco-diff-editor.mac .margin-view-overlays .line-numbers, .monaco-editor.mac .margin-view-overlays .line-numbers { cursor: pointer; color: $euiColorMediumShade; @@ -25,4 +27,18 @@ span.mtk29 { color: $euiColorAccent; } + + .diagonal-fill { + background: none; + } + + .monaco-diff-editor .line-insert, + .monaco-diff-editor .char-insert { + background-color: scale($euiColorVis0, -0.9); + } + + .monaco-diff-editor .line-delete, + .monaco-diff-editor .char-delete { + background-color: scale($euiColorDanger, -93%); + } } diff --git a/x-pack/legacy/plugins/code/public/sagas/blame.ts b/x-pack/legacy/plugins/code/public/sagas/blame.ts index 33b356a893d67..c5a39d4931137 100644 --- a/x-pack/legacy/plugins/code/public/sagas/blame.ts +++ b/x-pack/legacy/plugins/code/public/sagas/blame.ts @@ -11,7 +11,7 @@ import { Match } from '../actions'; import { loadBlame, loadBlameFailed, LoadBlamePayload, loadBlameSuccess } from 
'../actions/blame'; import { blamePattern } from './patterns'; -function requestBlame(repoUri: string, revision: string, path: string) { +export function requestBlame(repoUri: string, revision: string, path: string) { return npStart.core.http.get( `/api/code/repo/${repoUri}/blame/${encodeURIComponent(revision)}/${path}` ); diff --git a/x-pack/legacy/plugins/code/public/sagas/file.ts b/x-pack/legacy/plugins/code/public/sagas/file.ts index 568a2d38c8d14..42bb44d266306 100644 --- a/x-pack/legacy/plugins/code/public/sagas/file.ts +++ b/x-pack/legacy/plugins/code/public/sagas/file.ts @@ -167,7 +167,7 @@ function* handleFetchTreeCommits(action: Action, signal: Abort } } -function requestCommits( +export function requestCommits( { uri, revision }: FetchRepoPayloadWithRevision, path?: string, loadMore?: boolean, diff --git a/x-pack/legacy/plugins/code/public/style/_layout.scss b/x-pack/legacy/plugins/code/public/style/_layout.scss index 8e6bc23b8eb18..792cb0944cf3e 100644 --- a/x-pack/legacy/plugins/code/public/style/_layout.scss +++ b/x-pack/legacy/plugins/code/public/style/_layout.scss @@ -106,6 +106,10 @@ margin-top: $euiSize; } +.codeViewer { + flex-grow: 1; +} + .codeSidebar__container { background-color: $euiColorLightestShade; border-right: solid 1px $euiBorderColor; diff --git a/x-pack/legacy/plugins/code/public/style/_monaco.scss b/x-pack/legacy/plugins/code/public/style/_monaco.scss index 51c8308ce42a7..b04721251a09d 100644 --- a/x-pack/legacy/plugins/code/public/style/_monaco.scss +++ b/x-pack/legacy/plugins/code/public/style/_monaco.scss @@ -1,22 +1,24 @@ .codeSearch__highlight { - background-color: $euiColorVis5; - color: black !important; - padding: $euiSizeXS / 2; - border-radius: $euiSizeXS / 2; - font-weight: bold; - font-style: oblique; - cursor: pointer; + background-color: $euiColorVis5; + color: black !important; + padding: $euiSizeXS / 2; + border-radius: $euiSizeXS / 2; + font-weight: bold; + font-style: oblique; + cursor: pointer; } .codeBlock__line--highlighted { background-color: $euiColorLightShade; } +.monaco-diff-editor .margin-view-overlays .line-numbers, .monaco-editor .margin-view-overlays .line-numbers { text-align: center; border-right: $euiBorderThin; } +.monaco-diff-editor-hover, .monaco-editor-hover { min-width: 350px; border: $euiBorderThin; @@ -38,10 +40,12 @@ @include euiBottomShadow; } +.monaco-diff-editor-hover .hover-row, .monaco-editor-hover .hover-row { padding: 4px 5px; } +.monaco-diff-editor-hover .button-group, .monaco-editor-hover .button-group { background: linear-gradient(-180deg, $euiColorLightestShade 0%, $euiColorEmptyShade 100%); border-radius: 0 0 4px 4px; @@ -49,10 +53,12 @@ height: 33px; } +.monaco-diff-editor-hover .button-group button:not(:first-child), .monaco-editor-hover .button-group button:not(:first-child) { border-left: 1px solid $euiBorderColor; } +.monaco-diff-editor-hover .button-group button, .monaco-editor-hover .button-group button { font-size: 13px; font-weight: normal; @@ -61,6 +67,7 @@ flex: 1; } +.monaco-diff-editor .scroll-decoration, .monaco-editor .scroll-decoration { display: none; } diff --git a/x-pack/legacy/plugins/code/server/__tests__/lsp_service.ts b/x-pack/legacy/plugins/code/server/__tests__/lsp_service.ts index 4c85eb49968d7..6480402db9827 100644 --- a/x-pack/legacy/plugins/code/server/__tests__/lsp_service.ts +++ b/x-pack/legacy/plugins/code/server/__tests__/lsp_service.ts @@ -85,6 +85,7 @@ describe('lsp_service tests', () => { } const repoUri = 'github.com/test/test_repo'; + const mockRndPath = '__random'; 
// @ts-ignore before(async () => { @@ -101,7 +102,7 @@ describe('lsp_service tests', () => { function mockLspService() { const esClient = mockEsClient(); - return new LspService( + const service = new LspService( '127.0.0.1', serverOptions, gitOps, @@ -110,6 +111,9 @@ describe('lsp_service tests', () => { new ConsoleLoggerFactory(), new RepositoryConfigController(esClient) ); + // @ts-ignore + service.workspaceHandler.randomPath = () => 'random'; + return service; } async function sendHoverRequest(lspservice: LspService, revision: string) { @@ -145,13 +149,13 @@ describe('lsp_service tests', () => { ctrlSpy.restore(); const workspaceFolderExists = fs.existsSync( - path.join(serverOptions.workspacePath, repoUri, revision) + path.join(serverOptions.workspacePath, repoUri, mockRndPath, revision) ); // workspace is opened assert.ok(workspaceFolderExists); const workspacePath = fs.realpathSync( - path.resolve(serverOptions.workspacePath, repoUri, revision) + path.resolve(serverOptions.workspacePath, repoUri, mockRndPath, revision) ); // workspace handler is working, filled workspacePath sinon.assert.calledWith( @@ -177,7 +181,12 @@ describe('lsp_service tests', () => { // send a dummy request to open a workspace; const response = await sendHoverRequest(lspservice, revision); assert.ok(response); - const workspacePath = path.resolve(serverOptions.workspacePath, repoUri, revision); + const workspacePath = path.resolve( + serverOptions.workspacePath, + repoUri, + mockRndPath, + revision + ); const workspaceFolderExists = fs.existsSync(workspacePath); // workspace is opened assert.ok(workspaceFolderExists); @@ -216,7 +225,12 @@ describe('lsp_service tests', () => { // send a dummy request to open a workspace; const response = await sendHoverRequest(lspservice, revision); assert.ok(response); - const workspacePath = path.resolve(serverOptions.workspacePath, repoUri, revision); + const workspacePath = path.resolve( + serverOptions.workspacePath, + repoUri, + mockRndPath, + revision + ); const git = simplegit(workspacePath); const workspaceCommit = await git.revparse(['HEAD']); // workspace is newest now diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts index 6d70c8386c31d..9d168e604c1b3 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Request } from 'hapi'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import util from 'util'; import Boom from 'boom'; import { ServiceHandlerAdapter, ServiceRegisterOptions } from '../service_handler_adapter'; @@ -48,7 +48,7 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter { private readonly nonCodeAdapter: NonCodeNodeAdapter = new NonCodeNodeAdapter('', this.log); constructor( - private readonly server: CodeServerRouter, + private readonly router: CodeServerRouter, private readonly log: Logger, serverOptions: ServerOptions, esClient: EsClient @@ -113,17 +113,25 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter { const d = serviceDefinition[method]; const path = `${options.routePrefix}/${d.routePath || method}`; - this.server.route({ + this.router.route({ method: 'post', path, - handler: async (req: Request) => { - const { context, params } = req.payload as RequestPayload; + npHandler: async ( + ctx: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { + const { context, params } = req.body as RequestPayload; this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`); try { const data = await localHandler(params, context); - return { data }; + return res.ok({ body: { data } }); } catch (e) { - throw Boom.boomify(e); + if (Boom.isBoom(e)) { + throw e; + } else { + throw Boom.boomify(e, { statusCode: 500 }); + } } }, }); diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts index adb7e9b93fbad..e23b5a9027e75 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { LocalEndpoint } from '../local_endpoint'; import { CodeNode } from './code_nodes'; export class ClusterNodeEndpoint extends LocalEndpoint { constructor( - public readonly httpRequest: Request, + public readonly httpRequest: KibanaRequest, public readonly resource: string, public readonly codeNode: CodeNode ) { diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts index 27f5c57214112..6ac0b830905bb 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import Boom from 'boom'; import { Endpoint, ResourceLocator } from '../resource_locator'; import { ClusterService } from './cluster_service'; @@ -26,7 +26,7 @@ export class ClusterResourceLocator implements ResourceLocator { return RepositoryUtils.buildRepository(url).uri; } - async locate(req: Request, resource: string): Promise { + async locate(req: KibanaRequest, resource: string): Promise { // to be compatible with if (resource.trim() === '') { return new LocalEndpoint(req, resource); @@ -58,7 +58,7 @@ export class ClusterResourceLocator implements ResourceLocator { /** * Return undefined to let NodeRepositoriesService enqueue the clone job in cluster mode. */ - async allocate(req: Request, resource: string): Promise { + async allocate(req: KibanaRequest, resource: string): Promise { // make the cluster service synchronize the meta data and allocate new resources to nodes await this.clusterService.pollClusterState(); return undefined; diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts index 5f5319730c258..bcc2e7b21e672 100644 --- a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts +++ b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request, Server } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; +import { httpServiceMock, httpServerMock } from 'src/core/server/mocks'; import { createTestHapiServer } from '../test_utils'; import { LocalHandlerAdapter } from './local_handler_adapter'; import { CodeServerRouter } from '../security'; @@ -17,12 +18,13 @@ import { Logger } from '../log'; import { ConsoleLoggerFactory } from '../utils/console_logger_factory'; const log: Logger = new ConsoleLoggerFactory().getLogger(['test']); -let hapiServer: Server = createTestHapiServer(); +let hapiServer = createTestHapiServer(); -let server: CodeServerRouter = new CodeServerRouter(hapiServer); +const routerMock = httpServiceMock.createRouter(); +let router: CodeServerRouter = new CodeServerRouter(routerMock); beforeEach(async () => { hapiServer = createTestHapiServer(); - server = new CodeServerRouter(hapiServer); + router = new CodeServerRouter(routerMock); }); const TestDefinition = { test1: { @@ -49,13 +51,13 @@ test('local adapter should work', async () => { const services = new CodeServices(new LocalHandlerAdapter()); services.registerHandler(TestDefinition, testServiceHandler); const testApi = services.serviceFor(TestDefinition); - const endpoint = await services.locate({} as Request, ''); + const endpoint = await services.locate(httpServerMock.createKibanaRequest(), ''); const { result } = await testApi.test1(endpoint, { name: 'tester' }); expect(result).toBe(`hello tester`); }); -test('multi-node adapter should register routes', async () => { - const services = new CodeServices(new CodeNodeAdapter(server, log)); +test.skip('multi-node adapter should register routes', async () => { + const services = new CodeServices(new CodeNodeAdapter(router, log)); services.registerHandler(TestDefinition, testServiceHandler); const prefix = DEFAULT_SERVICE_OPTION.routePrefix; @@ -70,8 +72,8 @@ test('multi-node adapter should register routes', async () => { expect(data.result).toBe(`hello tester`); }); -test('non-code-node could send request to code-node', async () => { - 
const codeNode = new CodeServices(new CodeNodeAdapter(server, log)); +test.skip('non-code-node could send request to code-node', async () => { + const codeNode = new CodeServices(new CodeNodeAdapter(router, log)); const codeNodeUrl = 'http://localhost:5601'; const nonCodeNodeAdapter = new NonCodeNodeAdapter(codeNodeUrl, log); const nonCodeNode = new CodeServices(nonCodeNodeAdapter); @@ -80,13 +82,13 @@ test('non-code-node could send request to code-node', async () => { baseUrl: string, path: string, payload: RequestPayload, - originRequest: Request + originRequest: KibanaRequest ) => { expect(baseUrl).toBe(codeNodeUrl); const response = await hapiServer.inject({ method: 'POST', url: path, - headers: originRequest.headers, + headers: originRequest.headers as any, payload, }); expect(response.statusCode).toBe(200); @@ -96,11 +98,13 @@ test('non-code-node could send request to code-node', async () => { nonCodeNode.registerHandler(TestDefinition, null); const testApi = nonCodeNode.serviceFor(TestDefinition); const fakeRequest = ({ - path: 'fakePath', + route: { + path: 'fakePath', + }, headers: { fakeHeader: 'fakeHeaderValue', }, - } as unknown) as Request; + } as unknown) as KibanaRequest; const fakeResource = 'fakeResource'; const endpoint = await nonCodeNode.locate(fakeRequest, fakeResource); const { result } = await testApi.test1(endpoint, { name: 'tester' }); @@ -108,5 +112,5 @@ test('non-code-node could send request to code-node', async () => { const context = await testApi.test2(endpoint, {}); expect(context.resource).toBe(fakeResource); - expect(context.path).toBe(fakeRequest.path); + expect(context.path).toBe(fakeRequest.route.path); }); diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.ts index 480cab11ed84e..a2abe402a8e52 100644 --- a/x-pack/legacy/plugins/code/server/distributed/code_services.ts +++ b/x-pack/legacy/plugins/code/server/distributed/code_services.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { KibanaRequest } from 'src/core/server'; import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition'; import { DEFAULT_SERVICE_OPTION, @@ -11,7 +12,6 @@ import { ServiceRegisterOptions, } from './service_handler_adapter'; import { Endpoint } from './resource_locator'; -import { RequestFacade } from '../../'; export class CodeServices { constructor(private readonly adapter: ServiceHandlerAdapter) {} @@ -32,11 +32,11 @@ export class CodeServices { await this.adapter.stop(); } - public allocate(req: RequestFacade, resource: string): Promise { + public allocate(req: KibanaRequest, resource: string): Promise { return this.adapter.locator.allocate(req, resource); } - public locate(req: RequestFacade, resource: string): Promise { + public locate(req: KibanaRequest, resource: string): Promise { return this.adapter.locator.locate(req, resource); } diff --git a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts index 689ecc7fc641b..a7da90544fed3 100644 --- a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts @@ -4,17 +4,17 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { Endpoint } from './resource_locator'; import { RequestContext } from './service_definition'; export class LocalEndpoint implements Endpoint { - constructor(readonly httpRequest: Request, readonly resource: string) {} + constructor(readonly httpRequest: KibanaRequest, readonly resource: string) {} toContext(): RequestContext { return { resource: this.resource, - path: this.httpRequest.path, + path: this.httpRequest.route.path, } as RequestContext; } } diff --git a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts index f4d9b6f1815a0..4f51ee2938366 100644 --- a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { ServiceHandlerAdapter } from './service_handler_adapter'; import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition'; import { Endpoint, ResourceLocator } from './resource_locator'; @@ -45,7 +45,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter { } locator: ResourceLocator = { - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, @@ -53,7 +53,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter { return Promise.resolve(true); }, - async allocate(httpRequest: Request, resource: string): Promise { + async allocate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, }; diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts index 2778d29955e79..a7d2edf4b0308 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Request } from 'hapi'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import util from 'util'; import Boom from 'boom'; import { @@ -31,10 +31,10 @@ export interface RequestPayload { export class CodeNodeAdapter implements ServiceHandlerAdapter { localAdapter: LocalHandlerAdapter = new LocalHandlerAdapter(); - constructor(private readonly server: CodeServerRouter, private readonly log: Logger) {} + constructor(private readonly router: CodeServerRouter, private readonly log: Logger) {} locator: ResourceLocator = { - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, @@ -42,7 +42,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { return Promise.resolve(false); }, - async allocate(httpRequest: Request, resource: string): Promise { + async allocate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, }; @@ -70,11 +70,16 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { const d = serviceDefinition[method]; const path = `${options.routePrefix}/${d.routePath || method}`; // register routes, receive requests from non-code node. - this.server.route({ + this.router.route({ method: 'post', path, - handler: async (req: Request) => { - const { context, params } = req.payload as RequestPayload; + npHandler: async ( + ctx: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { + // @ts-ignore + const { context, params } = req.body as RequestPayload; this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`); const endpoint: Endpoint = { toContext(): RequestContext { @@ -83,7 +88,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { }; try { const data = await serviceMethodMap[method](endpoint, params); - return { data }; + return res.ok({ body: data }); } catch (e) { if (!Boom.isBoom(e)) { throw Boom.boomify(e, { statusCode: 500 }); diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts index 048b7c81dfe6f..03c4917dfb732 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts @@ -4,12 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { LocalEndpoint } from '../local_endpoint'; export class CodeNodeEndpoint extends LocalEndpoint { constructor( - public readonly httpRequest: Request, + public readonly httpRequest: KibanaRequest, public readonly resource: string, public readonly codeNodeUrl: string ) { diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts index b11ffeba394cf..e4b3d21b80ec7 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { Endpoint, ResourceLocator } from '../resource_locator'; import { CodeNodeEndpoint } from './code_node_endpoint'; export class CodeNodeResourceLocator implements ResourceLocator { constructor(private readonly codeNodeUrl: string) {} - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new CodeNodeEndpoint(httpRequest, resource, this.codeNodeUrl)); } @@ -19,7 +19,7 @@ export class CodeNodeResourceLocator implements ResourceLocator { return Promise.resolve(false); } - allocate(req: Request, resource: string): Promise { + allocate(req: KibanaRequest, resource: string): Promise { return this.locate(req, resource); } } diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts index 648dffd01663e..1221651bc51e2 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts @@ -7,7 +7,7 @@ import Wreck from '@hapi/wreck'; import util from 'util'; import Boom from 'boom'; -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import * as http from 'http'; import { DEFAULT_SERVICE_OPTION, @@ -23,8 +23,8 @@ import { Logger } from '../../log'; const pickHeaders = ['authorization']; -function filterHeaders(originRequest: Request) { - const result: { [name: string]: string } = {}; +function filterHeaders(originRequest: KibanaRequest) { + const result: { [name: string]: string | string[] | undefined } = {}; for (const header of pickHeaders) { if (originRequest.headers[header]) { result[header] = originRequest.headers[header]; @@ -82,7 +82,12 @@ export class NonCodeNodeAdapter implements ServiceHandlerAdapter { return dispatchedHandler as ServiceMethodMap; } - async requestFn(baseUrl: string, path: string, payload: RequestPayload, originRequest: Request) { + async requestFn( + baseUrl: string, + path: string, + payload: RequestPayload, + originRequest: KibanaRequest + ) { const opt = { baseUrl, payload: JSON.stringify(payload), diff --git a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts index 9dc6300675cb6..287e36982cbfd 100644 --- a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { RequestContext } from './service_definition'; export interface Endpoint { @@ -12,7 +12,7 @@ export interface Endpoint { } export interface ResourceLocator { - locate(req: Request, resource: string): Promise; + locate(req: KibanaRequest, resource: string): Promise; /** * Returns whether the resource resides on the local node. This should support both url and uri of the repository. @@ -25,5 +25,5 @@ export interface ResourceLocator { * Allocates the resource to nodes and returns the endpoint corresponds to the allocated node. * If the resource cannot be allocated to any node, it returns undefined. 
*/ - allocate(req: Request, resource: string): Promise; + allocate(req: KibanaRequest, resource: string): Promise; } diff --git a/x-pack/legacy/plugins/code/server/git_operations.ts b/x-pack/legacy/plugins/code/server/git_operations.ts index 54f32ed9b100c..f8e9dc5e589a0 100644 --- a/x-pack/legacy/plugins/code/server/git_operations.ts +++ b/x-pack/legacy/plugins/code/server/git_operations.ts @@ -318,7 +318,7 @@ export class GitOperations { const git = await this.openGit(uri); const commit = await this.getCommitOr404(uri, revision); if (!revision.includes('..')) { - revision = `${revision}..${revision}~1`; + revision = `${revision}~1..${revision}`; } const diffs = await git.diffSummary([revision]); @@ -506,7 +506,7 @@ export class GitOperations { const options: any = { n: count, format: { - updated: '%ai', + updated: '%aI', message: '%B', author: '%an', authorEmail: '%ae', diff --git a/x-pack/legacy/plugins/code/server/indexer/schema/document.ts b/x-pack/legacy/plugins/code/server/indexer/schema/document.ts index e28590bc32839..c2a2cd1e334d5 100644 --- a/x-pack/legacy/plugins/code/server/indexer/schema/document.ts +++ b/x-pack/legacy/plugins/code/server/indexer/schema/document.ts @@ -17,8 +17,6 @@ export const RepositoryDeleteStatusReservedField = 'repository_delete_status'; export const RepositoryIndexStatusReservedField = 'repository_index_status'; // The field name of repository config object nested in the Document index. export const RepositoryConfigReservedField = 'repository_config'; -// The field name of repository config object nested in the Document index. -export const RepositoryRandomPathReservedField = 'repository_random_path'; export const ALL_RESERVED = [ RepositoryReservedField, @@ -26,7 +24,6 @@ export const ALL_RESERVED = [ RepositoryDeleteStatusReservedField, RepositoryIndexStatusReservedField, RepositoryConfigReservedField, - RepositoryRandomPathReservedField, ]; // Correspond to model/search/Document @@ -107,9 +104,6 @@ export const DocumentSchema = { }, }, }, - [RepositoryRandomPathReservedField]: { - type: 'keyword', - }, // A single Repository Git Status object resides in this document index. [RepositoryGitStatusReservedField]: { properties: { diff --git a/x-pack/legacy/plugins/code/server/init_es.ts b/x-pack/legacy/plugins/code/server/init_es.ts index 39ae05bf26877..0b12cddb73983 100644 --- a/x-pack/legacy/plugins/code/server/init_es.ts +++ b/x-pack/legacy/plugins/code/server/init_es.ts @@ -4,17 +4,15 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Server } from 'hapi'; +import { IClusterClient } from 'src/core/server'; import { RepositoryIndexInitializerFactory } from './indexer'; import { RepositoryConfigController } from './repository_config_controller'; import { EsClientWithInternalRequest } from './utils/esclient_with_internal_request'; import { EsClient } from './lib/esqueue'; import { Logger } from './log'; -export async function initEs(server: Server, log: Logger) { - // wait until elasticsearch is ready - await server.plugins.elasticsearch.waitUntilReady(); - const esClient: EsClient = new EsClientWithInternalRequest(server); +export async function initEs(cluster: IClusterClient, log: Logger) { + const esClient: EsClient = new EsClientWithInternalRequest(cluster); const repoConfigController = new RepositoryConfigController(esClient); const repoIndexInitializerFactory = new RepositoryIndexInitializerFactory(esClient, log); return { diff --git a/x-pack/legacy/plugins/code/server/init_workers.ts b/x-pack/legacy/plugins/code/server/init_workers.ts index c4385cd711c5c..f20adf375f9a3 100644 --- a/x-pack/legacy/plugins/code/server/init_workers.ts +++ b/x-pack/legacy/plugins/code/server/init_workers.ts @@ -5,7 +5,6 @@ */ import checkDiskSpace from 'check-disk-space'; -import { Server } from 'hapi'; import { IndexerType } from '../model'; import { DiskWatermarkService } from './disk_watermark'; @@ -22,7 +21,6 @@ import { CloneScheduler, IndexScheduler, UpdateScheduler } from './scheduler'; import { Logger } from './log'; export function initWorkers( - server: Server, log: Logger, esClient: EsClient, queue: Esqueue, diff --git a/x-pack/legacy/plugins/code/server/lsp/workspace_handler.ts b/x-pack/legacy/plugins/code/server/lsp/workspace_handler.ts index fcec11ab19a42..85506d5d6e4d2 100644 --- a/x-pack/legacy/plugins/code/server/lsp/workspace_handler.ts +++ b/x-pack/legacy/plugins/code/server/lsp/workspace_handler.ts @@ -9,6 +9,7 @@ import del from 'del'; import fs from 'fs'; import { delay } from 'lodash'; import path from 'path'; +import crypto from 'crypto'; import { ResponseMessage } from 'vscode-jsonrpc/lib/messages'; import { Hover, Location, TextDocumentPositionParams } from 'vscode-languageserver'; @@ -97,7 +98,7 @@ export class WorkspaceHandler { wt = await this.openWorktree( git, workspaceBranch, - await this.revisionDir(repositoryUri, ref), + await this.revisionDir(repositoryUri, ref, this.randomPath()), targetRevision ); } @@ -110,6 +111,10 @@ export class WorkspaceHandler { }; } + private randomPath() { + return crypto.randomBytes(4).toString('hex'); + } + public async openWorktree( git: SimpleGit, workspaceBranch: string, @@ -378,14 +383,20 @@ export class WorkspaceHandler { } } - public async revisionDir(repositoryUri: string, ref: string) { - return path.join(await this.workspaceDir(repositoryUri), ref); + public async revisionDir(repositoryUri: string, ref: string, randomStr: string = '') { + return path.join(await this.workspaceDir(repositoryUri, randomStr), ref); } - private async workspaceDir(repoUri: string) { - const randomStr = - this.objectClient && (await this.objectClient.getRepositoryRandomStr(repoUri)); + private async workspaceDir(repoUri: string, randomStr: string = '') { const base = path.join(this.workspacePath, repoUri); + if (randomStr === '') { + const git = await this.gitOps.openGit(repoUri); + const trees = await this.listWorktrees(git); + if (trees.size > 0) { + const wt = trees.values().next().value; + return path.dirname(wt.path); + } + } if (randomStr) { return path.join(base, 
`__${randomStr}`); } else { diff --git a/x-pack/legacy/plugins/code/server/plugin.ts b/x-pack/legacy/plugins/code/server/plugin.ts index 390b0ddc1256c..737d0b5c6686b 100644 --- a/x-pack/legacy/plugins/code/server/plugin.ts +++ b/x-pack/legacy/plugins/code/server/plugin.ts @@ -6,7 +6,7 @@ import crypto from 'crypto'; import * as _ from 'lodash'; -import { CoreSetup } from 'src/core/server'; +import { CoreSetup, IRouter } from 'src/core/server'; import { RepositoryIndexInitializerFactory, tryMigrateIndices } from './indexer'; import { Esqueue } from './lib/esqueue'; @@ -55,6 +55,18 @@ import { NodeRepositoriesService } from './distributed/cluster/node_repositories import { initCodeUsageCollector } from './usage_collector'; import { PluginSetupContract } from '../../../../plugins/code/server/index'; +declare module 'src/core/server' { + interface RequestHandlerContext { + code: { + codeServices: CodeServices | null; + // @deprecated + legacy: { + securityPlugin: any; + }; + }; + } +} + export class CodePlugin { private isCodeNode = false; @@ -67,15 +79,30 @@ export class CodePlugin { private codeServices: CodeServices | null = null; private nodeService: NodeRepositoriesService | null = null; + private rndString: string | null = null; + private router: IRouter | null = null; + constructor(private readonly initContext: PluginSetupContract) { this.log = {} as Logger; this.serverOptions = {} as ServerOptions; } - public setup(core: CoreSetup) { + public async setup(core: CoreSetup, npHttp: any) { const { server } = core.http as any; this.serverOptions = new ServerOptions(this.initContext.legacy.config, server.config()); this.log = new Logger(this.initContext.legacy.logger, this.serverOptions.verbose); + + this.router = npHttp.createRouter(); + this.rndString = crypto.randomBytes(20).toString('hex'); + + npHttp.registerRouteHandlerContext('code', () => { + return { + codeServices: this.codeServices, + legacy: { + securityPlugin: server.plugins.security, + }, + }; + }); } // TODO: CodeStart will not have the register route api. 
@@ -83,16 +110,17 @@ export class CodePlugin { public async start(core: CoreSetup) { // called after all plugins are set up const { server } = core.http as any; - const codeServerRouter = new CodeServerRouter(server); + const codeServerRouter = new CodeServerRouter(this.router!); const codeNodeUrl = this.serverOptions.codeNodeUrl; - const rndString = crypto.randomBytes(20).toString('hex'); - checkRoute(server, rndString); + + checkRoute(this.router!, this.rndString!); + if (this.serverOptions.clusterEnabled) { this.initDevMode(server); this.codeServices = await this.initClusterNode(server, codeServerRouter); } else if (codeNodeUrl) { const checkResult = await this.retryUntilAvailable( - async () => await checkCodeNode(codeNodeUrl, this.log, rndString), + async () => await checkCodeNode(codeNodeUrl, this.log, this.rndString!), 5000 ); if (checkResult.me) { @@ -115,7 +143,7 @@ export class CodePlugin { private async initClusterNode(server: any, codeServerRouter: CodeServerRouter) { this.log.info('Initializing Code plugin as cluster-node'); const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs( - server, + this.initContext.legacy.elasticsearch.adminClient$, this.log ); const clusterNodeAdapter = new ClusterNodeAdapter( @@ -139,7 +167,6 @@ export class CodePlugin { ); this.lspService = lspService; const { indexScheduler, updateScheduler, cloneWorker } = initWorkers( - server, this.log, esClient, this.queue!, @@ -159,18 +186,18 @@ export class CodePlugin { ); await this.nodeService.start(); + this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); + // Execute index version checking and try to migrate index data if necessary. await tryMigrateIndices(esClient, this.log); - this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); - return codeServices; } private async initCodeNode(server: any, codeServices: CodeServices) { this.isCodeNode = true; const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs( - server, + this.initContext.legacy.elasticsearch.adminClient$, this.log ); @@ -186,7 +213,6 @@ export class CodePlugin { ); this.lspService = lspService; const { indexScheduler, updateScheduler } = initWorkers( - server, this.log, esClient, this.queue!, @@ -198,14 +224,14 @@ export class CodePlugin { this.indexScheduler = indexScheduler; this.updateScheduler = updateScheduler; - // Execute index version checking and try to migrate index data if necessary. - await tryMigrateIndices(esClient, this.log); - this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); // TODO: extend the usage collection to cluster mode. initCodeUsageCollector(server, esClient, lspService); + // Execute index version checking and try to migrate index data if necessary. 
+ await tryMigrateIndices(esClient, this.log); + return codeServices; } @@ -235,7 +261,10 @@ export class CodePlugin { codeServices.registerHandler(LspServiceDefinition, null, LspServiceDefinitionOption); codeServices.registerHandler(WorkspaceDefinition, null); codeServices.registerHandler(SetupDefinition, null); - const { repoConfigController, repoIndexInitializerFactory } = await initEs(server, this.log); + const { repoConfigController, repoIndexInitializerFactory } = await initEs( + this.initContext.legacy.elasticsearch.adminClient$, + this.log + ); this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); return codeServices; } @@ -246,7 +275,7 @@ export class CodePlugin { repoIndexInitializerFactory: RepositoryIndexInitializerFactory, repoConfigController: RepositoryConfigController ) { - const codeServerRouter = new CodeServerRouter(server); + const codeServerRouter = new CodeServerRouter(this.router!); repositoryRoute( codeServerRouter, codeServices, @@ -264,7 +293,7 @@ export class CodePlugin { fileRoute(codeServerRouter, codeServices); workspaceRoute(codeServerRouter, this.serverOptions, codeServices); symbolByQnameRoute(codeServerRouter, this.log); - installRoute(codeServerRouter, codeServices, this.serverOptions); + installRoute(server, codeServerRouter, codeServices, this.serverOptions); lspRoute(codeServerRouter, codeServices, this.serverOptions, this.log); setupRoute(codeServerRouter, codeServices); statusRoute(codeServerRouter, codeServices); diff --git a/x-pack/legacy/plugins/code/server/queue/delete_worker.test.ts b/x-pack/legacy/plugins/code/server/queue/delete_worker.test.ts index 86424db7f19d7..fd6de59ffa722 100644 --- a/x-pack/legacy/plugins/code/server/queue/delete_worker.test.ts +++ b/x-pack/legacy/plugins/code/server/queue/delete_worker.test.ts @@ -179,9 +179,7 @@ test('On delete job uri does not exist.', async () => { log, esClient as EsClient, { - security: { - enableGitCertCheck: true, - }, + security: {}, } as ServerOptions, (gitOps as any) as GitOperations, (cancellationService as any) as CancellationSerivce, @@ -279,9 +277,7 @@ test('On delete job uri contains ../', async () => { log, esClient as EsClient, { - security: { - enableGitCertCheck: true, - }, + security: {}, } as ServerOptions, (gitOps as any) as GitOperations, (cancellationService as any) as CancellationSerivce, diff --git a/x-pack/legacy/plugins/code/server/routes/check.ts b/x-pack/legacy/plugins/code/server/routes/check.ts index ad89d6281b4ff..7e585ffc34922 100644 --- a/x-pack/legacy/plugins/code/server/routes/check.ts +++ b/x-pack/legacy/plugins/code/server/routes/check.ts @@ -4,10 +4,16 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { schema } from '@kbn/config-schema'; import fetch from 'node-fetch'; +import { + IRouter, + KibanaRequest, + KibanaResponseFactory, + RequestHandlerContext, +} from 'src/core/server'; import { Logger } from '../log'; -import { ServerFacade } from '../..'; export async function checkCodeNode(url: string, log: Logger, rndStr: string) { try { @@ -24,13 +30,22 @@ export async function checkCodeNode(url: string, log: Logger, rndStr: string) { return null; } -export function checkRoute(server: ServerFacade, rndStr: string) { - server.route({ - method: 'GET', - path: '/api/code/codeNode', - options: { auth: false }, - handler(req: any) { - return { me: req.query.rndStr === rndStr }; +export function checkRoute(router: IRouter, rndStr: string) { + router.get( + { + path: '/api/code/codeNode', + validate: { + query: schema.object({}, { allowUnknowns: true }), + }, + options: { + authRequired: false, + }, }, - }); + (context: RequestHandlerContext, req: KibanaRequest, res: KibanaResponseFactory) => { + return res.ok({ + // @ts-ignore + body: { me: req.query.rndStr === rndStr }, + }); + } + ); } diff --git a/x-pack/legacy/plugins/code/server/routes/file.ts b/x-pack/legacy/plugins/code/server/routes/file.ts index 10a9050fa0a90..47cc16f7a6574 100644 --- a/x-pack/legacy/plugins/code/server/routes/file.ts +++ b/x-pack/legacy/plugins/code/server/routes/file.ts @@ -4,9 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - -import { RequestFacade, RequestQueryFacade, ResponseToolkitFacade } from '../../'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { DEFAULT_TREE_CHILDREN_LIMIT } from '../git_operations'; import { CodeServerRouter } from '../security'; import { RepositoryObjectClient } from '../search'; @@ -20,14 +18,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) const gitService = codeServices.serviceFor(GitServiceDefinition); async function getRepoUriFromMeta( - req: RequestFacade, + context: RequestHandlerContext, + req: KibanaRequest, repoUri: string ): Promise { - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { const repo = await repoObjectClient.getRepository(repoUri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri); return repo.uri; } catch (e) { return undefined; @@ -37,23 +36,27 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/tree/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const queries = req.query as RequestQueryFacade; + const queries = req.query as any; const limit = queries.limit ? parseInt(queries.limit as string, 10) : DEFAULT_TREE_CHILDREN_LIMIT; const skip = queries.skip ? 
parseInt(queries.skip as string, 10) : 0; const withParents = 'parents' in queries; const flatten = 'flatten' in queries; - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.fileTree(endpoint, { + const filetree = await gitService.fileTree(endpoint, { uri: repoUri, path, revision, @@ -62,11 +65,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) withParents, flatten, }); + return res.ok({ body: filetree }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -75,46 +82,59 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/blob/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { const blob = await gitService.blob(endpoint, { uri, path, - line: (req.query as RequestQueryFacade).line as string, + line: (req.query as any).line as string, revision: decodeURIComponent(revision), }); if (blob.imageType) { - const response = h.response(blob.content); - response.type(blob.imageType); - return response; + return res.ok({ + body: blob.content, + headers: { 'Content-Type': blob.imageType }, + }); } else if (blob.isBinary) { - return h - .response('') - .type('application/octet-stream') - .code(204); + return res.noContent({ + headers: { 'Content-Type': 'application/octet-stream' }, + }); } else { if (blob.content) { - return h - .response(blob.content) - .type('text/plain') - .header('lang', blob.lang!); + return res.ok({ + body: blob.content, + headers: { + 'Content-Type': 'text/plain', + lang: blob.lang!, + }, + }); } else { - return h.response('').type(`text/big`); + return res.ok({ + body: blob.content, + headers: { 'Content-Type': 'text/big' }, + }); } } } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? 
e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -123,27 +143,40 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/app/code/repo/{uri*3}/raw/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { const blob = await gitService.raw(endpoint, { uri: repoUri, path, revision }); if (blob.isBinary) { - return h.response(blob.content).encoding('binary'); + return res.ok({ + body: blob.content, + headers: { 'Content-Transfer-Encoding': 'binary' }, + }); } else { - return h.response(blob.content).type('text/plain'); + return res.ok({ + body: blob.content, + headers: { 'Content-Type': 'text/plain' }, + }); } } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -152,33 +185,47 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/history/{ref}', method: 'GET', - handler: historyHandler, + npHandler: historyHandler, }); router.route({ path: '/api/code/repo/{uri*3}/history/{ref}/{path*}', method: 'GET', - handler: historyHandler, + npHandler: historyHandler, }); - async function historyHandler(req: RequestFacade) { - const { uri, ref, path } = req.params; + async function historyHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, ref, path } = req.params as any; const revision = decodeRevisionString(ref); - const queries = req.query as RequestQueryFacade; + const queries = req.query as any; const count = queries.count ? parseInt(queries.count as string, 10) : 10; const after = queries.after !== undefined; try { - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); - return await gitService.history(endpoint, { uri: repoUri, path, revision, count, after }); + const history = await gitService.history(endpoint, { + uri: repoUri, + path, + revision, + count, + after, + }); + return res.ok({ body: history }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? 
e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } } @@ -186,21 +233,29 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/references', method: 'GET', - async handler(req: RequestFacade) { - const uri = req.params.uri; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.badRequest({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.branchesAndTags(endpoint, { uri: repoUri }); + const branchesAndTags = await gitService.branchesAndTags(endpoint, { uri: repoUri }); + return res.ok({ body: branchesAndTags }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -209,23 +264,31 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/diff/{revision}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, revision } = req.params; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, revision } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.commitDiff(endpoint, { + const diff = await gitService.commitDiff(endpoint, { uri: repoUri, revision: decodeRevisionString(revision), }); + return res.ok({ body: diff }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -234,25 +297,33 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/blame/{revision}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, revision } = req.params; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, revision } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.blame(endpoint, { + const blames = await gitService.blame(endpoint, { uri: repoUri, revision: decodeRevisionString(decodeURIComponent(revision)), path, }); + return res.ok({ body: blames }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? 
e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, diff --git a/x-pack/legacy/plugins/code/server/routes/index.ts b/x-pack/legacy/plugins/code/server/routes/index.ts index 27f40de552a3e..82973ac1d2791 100644 --- a/x-pack/legacy/plugins/code/server/routes/index.ts +++ b/x-pack/legacy/plugins/code/server/routes/index.ts @@ -8,7 +8,6 @@ export * from './check'; export * from './file'; export * from './install'; export * from './lsp'; -export * from './redirect'; export * from './repository'; export * from './search'; export * from './setup'; diff --git a/x-pack/legacy/plugins/code/server/routes/install.ts b/x-pack/legacy/plugins/code/server/routes/install.ts index 338f305cba858..28ccc4012ceec 100644 --- a/x-pack/legacy/plugins/code/server/routes/install.ts +++ b/x-pack/legacy/plugins/code/server/routes/install.ts @@ -4,9 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import * as Boom from 'boom'; - -import { RequestFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; +import { ServerFacade } from '../..'; import { enabledLanguageServers, LanguageServerDefinition } from '../lsp/language_servers'; import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; @@ -15,12 +14,13 @@ import { Endpoint } from '../distributed/resource_locator'; import { ServerOptions } from '../server_options'; export function installRoute( + server: ServerFacade, router: CodeServerRouter, codeServices: CodeServices, options: ServerOptions ) { const lspService = codeServices.serviceFor(LspServiceDefinition); - const kibanaVersion = router.server.config().get('pkg.version') as string; + const kibanaVersion = server.config().get('pkg.version') as string; const status = async (endpoint: Endpoint, def: LanguageServerDefinition) => ({ name: def.name, status: await lspService.languageServerStatus(endpoint, { langName: def.name }), @@ -35,23 +35,35 @@ export function installRoute( router.route({ path: '/api/code/install', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const endpoint = await codeServices.locate(req, ''); - return await Promise.all(enabledLanguageServers(options).map(def => status(endpoint, def))); + const installRes = await Promise.all( + enabledLanguageServers(options).map(def => status(endpoint, def)) + ); + return res.ok({ body: installRes }); }, method: 'GET', }); router.route({ path: '/api/code/install/{name}', - async handler(req: RequestFacade) { - const name = req.params.name; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { name } = req.params as any; const def = enabledLanguageServers(options).find(d => d.name === name); const endpoint = await codeServices.locate(req, ''); if (def) { - return await status(endpoint, def); + const installRes = await status(endpoint, def); + return res.ok({ body: installRes }); } else { - return Boom.notFound(`language server ${name} not found.`); + return res.notFound({ body: `language server ${name} not found.` }); } }, method: 'GET', diff --git a/x-pack/legacy/plugins/code/server/routes/lsp.ts b/x-pack/legacy/plugins/code/server/routes/lsp.ts index 10acb1e3863e8..6b8af10f9f11e 100644 --- a/x-pack/legacy/plugins/code/server/routes/lsp.ts +++ 
b/x-pack/legacy/plugins/code/server/routes/lsp.ts @@ -4,10 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; import { ResponseError } from 'vscode-jsonrpc'; import { ResponseMessage } from 'vscode-jsonrpc/lib/messages'; import { SymbolLocator } from '@elastic/lsp-extension'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { LanguageServerStartFailed, @@ -22,7 +22,6 @@ import { ServerOptions } from '../server_options'; import { EsClientWithRequest } from '../utils/esclient_with_request'; import { promiseTimeout } from '../utils/timeout'; -import { RequestFacade, ResponseToolkitFacade } from '../..'; import { CodeServices } from '../distributed/code_services'; import { GitServiceDefinition, LspServiceDefinition } from '../distributed/apis'; import { findTitleFromHover, groupFiles } from '../utils/lsp_utils'; @@ -32,7 +31,7 @@ import { SymbolSearchResult } from '../../model'; const LANG_SERVER_ERROR = 'language server error'; export function lspRoute( - server: CodeServerRouter, + router: CodeServerRouter, codeServices: CodeServices, serverOptions: ServerOptions, log: Logger @@ -40,23 +39,29 @@ export function lspRoute( const lspService = codeServices.serviceFor(LspServiceDefinition); const gitService = codeServices.serviceFor(GitServiceDefinition); - server.route({ + router.route({ path: '/api/code/lsp/textDocument/{method}', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - if (typeof req.payload === 'object' && req.payload != null) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + if (typeof req.body === 'object' && req.body != null) { + // @ts-ignore const method = req.params.method; if (method) { try { - const params = (req.payload as unknown) as any; + const params = (req.body as unknown) as any; const uri = params.textDocument.uri; const { repoUri } = parseLspUrl(uri)!; - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const requestPromise = lspService.sendRequest(endpoint, { method: `textDocument/${method}`, - params: req.payload, + params: req.body, }); - return await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise); + const result = await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise); + return res.ok({ body: result }); } catch (error) { if (error instanceof ResponseError) { // hide some errors; @@ -67,39 +72,48 @@ export function lspRoute( ) { log.debug(error); } - return h - .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); // different code for LS errors and other internal errors. + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } else if (error.isBoom) { - return error; + return res.customError({ + body: error.error, + statusCode: error.statusCode ? 
error.statusCode : 500, + }); } else { log.error(error); - return h - .response({ error: { code: error.code || 500, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } } } else { - return h.response('missing `method` in request').code(400); + return res.badRequest({ body: 'missing `method` in request' }); } } else { - return h.response('json body required').code(400); // bad request + return res.badRequest({ body: 'json body required' }); } }, method: 'POST', }); - server.route({ + router.route({ path: '/api/code/lsp/findDefinitions', method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { // @ts-ignore - const { textDocument, position } = req.payload; + const { textDocument, position } = req.body as any; + // @ts-ignore + const { qname } = req.params as any; const { uri } = textDocument; const { repoUri } = parseLspUrl(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const response: ResponseMessage = await promiseTimeout( serverOptions.lsp.requestTimeoutMs, @@ -116,16 +130,16 @@ export function lspRoute( }, }); const title: string = await findTitleFromHover(hover, uri, position); - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); const locators = response.result as SymbolLocator[]; const locations = []; - const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); + const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences(); for (const locator of locators) { if (locator.location) { locations.push(locator.location); } else if (locator.qname && repoScope.length > 0) { - const searchResults = await symbolSearchClient.findByQname(req.params.qname, repoScope); + const searchResults = await symbolSearchClient.findByQname(qname, repoScope); for (const symbol of searchResults.symbols) { locations.push(symbol.symbolInformation.location); } @@ -135,20 +149,23 @@ export function lspRoute( const ep = await codeServices.locate(req, loc.uri); return await gitService.blob(ep, loc); }); - return { title, files, uri, position }; + return res.ok({ body: { title, files, uri, position } }); }, }); - server.route({ + router.route({ path: '/api/code/lsp/findReferences', method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { try { - // @ts-ignore - const { textDocument, position } = req.payload; + const { textDocument, position } = req.body as any; const { uri } = textDocument; const { repoUri } = parseLspUrl(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const response: ResponseMessage = await promiseTimeout( serverOptions.lsp.requestTimeoutMs, @@ -169,21 +186,24 @@ export function lspRoute( const ep = await codeServices.locate(req, loc.uri); return await gitService.blob(ep, 
loc); }); - return { title, files, uri, position }; + return res.ok({ body: { title, files, uri, position } }); } catch (error) { log.error(error); if (error instanceof ResponseError) { - return h - .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); // different code for LS errors and other internal errors. + return res.custom({ + statusCode: 500, + body: { error: { code: error.code, msg: LANG_SERVER_ERROR } }, + }); } else if (error.isBoom) { - return error; + return res.customError({ + body: error.error, + statusCode: error.statusCode ? error.statusCode : 500, + }); } else { - return h - .response({ error: { code: 500, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } } }, @@ -194,21 +214,26 @@ export function symbolByQnameRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/lsp/symbol/{qname}', method: 'GET', - async handler(req: RequestFacade) { - try { - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); - const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); - if (repoScope.length === 0) { - return { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + // @ts-ignore + const { qname } = req.params as any; + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); + const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences(); + if (repoScope.length === 0) { + return res.ok({ + body: { symbols: [], total: 0, took: 0, - } as SymbolSearchResult; - } - return await symbolSearchClient.findByQname(req.params.qname, repoScope); - } catch (error) { - return Boom.internal(`Search Exception`); + } as SymbolSearchResult, + }); } + const symbol = await symbolSearchClient.findByQname(qname, repoScope); + return res.ok({ body: symbol }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/redirect.ts b/x-pack/legacy/plugins/code/server/routes/redirect.ts deleted file mode 100644 index 2882a37334836..0000000000000 --- a/x-pack/legacy/plugins/code/server/routes/redirect.ts +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { RequestFacade, ServerFacade } from '../../'; -import { Logger } from '../log'; - -export function redirectRoute(server: ServerFacade, redirectUrl: string, log: Logger) { - const proxyHandler = { - proxy: { - passThrough: true, - async mapUri(request: RequestFacade) { - let uri; - uri = `${redirectUrl}${request.path}`; - if (request.url.search) { - uri += request.url.search; - } - log.info(`redirect ${request.path}${request.url.search || ''} to ${uri}`); - return { - uri, - }; - }, - }, - }; - - server.route({ - path: '/api/code/{p*}', - method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], - handler: proxyHandler, - }); - - server.route({ - path: '/api/code/lsp/{p*}', - method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], - handler: proxyHandler, - }); -} diff --git a/x-pack/legacy/plugins/code/server/routes/repository.ts b/x-pack/legacy/plugins/code/server/routes/repository.ts index 5947dc869968a..d9e8edb4d2f50 100644 --- a/x-pack/legacy/plugins/code/server/routes/repository.ts +++ b/x-pack/legacy/plugins/code/server/routes/repository.ts @@ -4,10 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - import { i18n } from '@kbn/i18n'; -import { RequestFacade, ResponseToolkitFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; + import { validateGitUrl } from '../../common/git_url_utils'; import { RepositoryUtils } from '../../common/repository_utils'; import { RepositoryConfig, RepositoryUri, WorkerReservedProgress } from '../../model'; @@ -36,8 +35,12 @@ export function repositoryRoute( path: '/api/code/repo', requireAdmin: true, method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const repoUrl: string = (req.payload as any).url; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const repoUrl: string = (req.body as any).url; // Reject the request if the url is an invalid git url. try { @@ -49,11 +52,11 @@ export function repositoryRoute( } catch (error) { log.error(`Validate git url ${repoUrl} error.`); log.error(error); - return Boom.badRequest(error); + return res.badRequest({ body: error }); } const repo = RepositoryUtils.buildRepository(repoUrl); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // Check if the repository already exists @@ -61,28 +64,32 @@ export function repositoryRoute( // distinguish between that the repository exists in the current space and that the repository exists in // another space, and return the default message if error happens during reference checking. 
try { - const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference( + const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference( repo.uri ); if (!hasRef) { - return Boom.conflict( - i18n.translate('xpack.code.repositoryManagement.repoOtherSpaceImportedMessage', { - defaultMessage: 'The repository has already been imported in another space!', - }) - ); + return res.custom({ + statusCode: 409, // conflict + body: i18n.translate( + 'xpack.code.repositoryManagement.repoOtherSpaceImportedMessage', + { + defaultMessage: 'The repository has already been imported in another space!', + } + ), + }); } } catch (e) { log.error(`Failed to check reference for ${repo.uri} in current space`); } const msg = `Repository ${repoUrl} already exists. Skip clone.`; log.info(msg); - return h.response(msg).code(304); // Not Modified + return res.custom({ statusCode: 304, body: msg }); } catch (error) { log.info(`Repository ${repoUrl} does not exist. Go ahead with clone.`); try { // create the reference first, and make the creation idempotent, to avoid potential dangling repositories // which have no references from any space, in case the writes to ES may fail independently - await getReferenceHelper(req.getSavedObjectsClient()).createReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).createReference(repo.uri); // Create the index for the repository const initializer = (await repoIndexInitializerFactory.create( @@ -93,10 +100,7 @@ export function repositoryRoute( // Persist to elasticsearch await repoObjectClient.setRepository(repo.uri, repo); - const randomStr = Math.random() - .toString(36) - .substring(2, 15); - await repoObjectClient.setRepositoryRandomStr(repo.uri, randomStr); + // Kick off clone job const payload = { url: repoUrl, @@ -108,12 +112,12 @@ export function repositoryRoute( if (endpoint) { await repositoryService.clone(endpoint, payload); } - return repo; + return res.ok({ body: repo }); } catch (error2) { const msg = `Issue repository clone request for ${repoUrl} error`; log.error(msg); log.error(error2); - return Boom.badRequest(msg); + return res.badRequest({ body: msg }); } } }, @@ -124,12 +128,16 @@ export function repositoryRoute( path: '/api/code/repo/{uri*3}', requireAdmin: true, method: 'DELETE', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const repoUri: string = req.params.uri as string; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // make sure the repo belongs to the current space - getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); // Check if the repository already exists. If not, an error will be thrown. await repoObjectClient.getRepository(repoUri); @@ -142,7 +150,7 @@ export function repositoryRoute( if (status.progress !== WorkerReservedProgress.ERROR) { const msg = `Repository ${repoUri} is already in delete.`; log.info(msg); - return h.response(msg).code(304); // Not Modified + return res.custom({ statusCode: 304, body: msg }); } } catch (error) { // Do nothing here since this error is expected. 
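(Illustrative note, not part of the diff: the hunks in this file, like the other route files in this change, all apply the same migration. Legacy Hapi-style `handler(req, h)` functions that returned Boom errors become New Platform `npHandler(context, req, res)` functions that build responses through `KibanaResponseFactory`. A minimal sketch of the resulting handler shape follows; the route path and function name are hypothetical, and only response helpers that appear elsewhere in this diff are used.)

import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import { CodeServerRouter } from '../security';

export function exampleRoute(router: CodeServerRouter) {
  router.route({
    path: '/api/code/example/{uri*3}',
    method: 'GET',
    async npHandler(
      context: RequestHandlerContext,
      req: KibanaRequest,
      res: KibanaResponseFactory
    ) {
      const { uri } = req.params as any;
      try {
        // Load whatever the route serves; here the parameter is simply echoed back.
        return res.ok({ body: { uri } });
      } catch (e) {
        if (e.isBoom) {
          // Preserve the status code carried by a legacy Boom error.
          return res.customError({
            body: e.error,
            statusCode: e.statusCode ? e.statusCode : 500,
          });
        }
        return res.internalError({ body: e.message || e.name });
      }
    },
  });
}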
@@ -154,15 +162,14 @@ export function repositoryRoute( }; const endpoint = await codeServices.locate(req, repoUri); await repositoryService.delete(endpoint, payload); - // delete the reference last to avoid dangling repositories - await getReferenceHelper(req.getSavedObjectsClient()).deleteReference(repoUri); - return {}; + await getReferenceHelper(context.core.savedObjects.client).deleteReference(repoUri); + return res.ok(); } catch (error) { const msg = `Issue repository delete request for ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -171,17 +178,22 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - return await repoObjectClient.getRepository(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); + const repo = await repoObjectClient.getRepository(repoUri); + return res.ok({ body: repo }); } catch (error) { const msg = `Get repository ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -189,15 +201,20 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/status/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); let gitStatus = null; let indexStatus = null; let deleteStatus = null; - const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference(repoUri); + const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference( + repoUri + ); if (hasRef) { try { @@ -218,16 +235,17 @@ export function repositoryRoute( log.debug(`Get repository delete status ${repoUri} error: ${error}`); } } - return { + const status = { gitStatus, indexStatus, deleteStatus, }; + return res.ok({ body: status }); } catch (error) { const msg = `Get repository status ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -236,16 +254,21 @@ export function repositoryRoute( router.route({ path: '/api/code/repos', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { try { - const uris = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - return await repoObjectClient.getRepositories(uris); + const uris = await getReferenceHelper(context.core.savedObjects.client).findReferences(); + const repoObjectClient = new 
RepositoryObjectClient(new EsClientWithRequest(context, req)); + const repo = await repoObjectClient.getRepositories(uris); + return res.ok({ body: repo }); } catch (error) { const msg = `Get all repositories error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -257,12 +280,16 @@ export function repositoryRoute( path: '/api/code/repo/index/{uri*3}', method: 'POST', requireAdmin: true, - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; - const reindex: boolean = (req.payload as any).reindex; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; + const reindex: boolean = (req.body as any).reindex; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); const cloneStatus = await repoObjectClient.getRepositoryGitStatus(repoUri); const payload = { @@ -272,12 +299,12 @@ export function repositoryRoute( }; const endpoint = await codeServices.locate(req, repoUri); await repositoryService.index(endpoint, payload); - return {}; + return res.ok(); } catch (error) { const msg = `Index repository ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -287,29 +314,33 @@ export function repositoryRoute( path: '/api/code/repo/config/{uri*3}', method: 'PUT', requireAdmin: true, - async handler(req: RequestFacade) { - const config: RepositoryConfig = req.payload as RepositoryConfig; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const config: RepositoryConfig = req.body as RepositoryConfig; const repoUri: RepositoryUri = config.uri; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // Check if the repository exists - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); await repoObjectClient.getRepository(repoUri); } catch (error) { - return Boom.badRequest(`Repository not existed for ${repoUri}`); + return res.badRequest({ body: `Repository not existed for ${repoUri}` }); } try { // Persist to elasticsearch await repoObjectClient.setRepositoryConfig(repoUri, config); repoConfigController.resetConfigCache(repoUri); - return {}; + return res.ok(); } catch (error) { const msg = `Update repository config for ${repoUri} error`; log.error(msg); log.error(error); - return Boom.badRequest(msg); + return res.notFound({ body: msg }); } }, }); @@ -318,14 +349,19 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/config/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new 
EsClientWithRequest(req)); - return await repoObjectClient.getRepositoryConfig(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); + const config = await repoObjectClient.getRepositoryConfig(repoUri); + return res.ok({ body: config }); } catch (error) { - return Boom.notFound(`Repository config ${repoUri} not exist`); + return res.notFound({ body: `Repository config ${repoUri} not exist` }); } }, }); diff --git a/x-pack/legacy/plugins/code/server/routes/search.ts b/x-pack/legacy/plugins/code/server/routes/search.ts index 86bdc931cff7a..5c2b731b33c42 100644 --- a/x-pack/legacy/plugins/code/server/routes/search.ts +++ b/x-pack/legacy/plugins/code/server/routes/search.ts @@ -4,9 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - -import { RequestFacade, RequestQueryFacade } from '../../'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { CommitSearchRequest, DocumentSearchRequest, @@ -32,9 +30,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/repo', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -42,14 +44,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: RepositorySearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log); - const res = await repoSearchClient.search(searchReq); - return res; + const repoSearchClient = new RepositorySearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await repoSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -57,9 +62,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/repo', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -67,14 +76,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: RepositorySearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log); - const res = await repoSearchClient.suggest(searchReq); - return res; + const repoSearchClient = new RepositorySearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await repoSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return 
Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -84,9 +96,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/doc', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, langs, repos, repoScope } = req.query as RequestQueryFacade; + const { p, q, langs, repos, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -96,14 +112,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { page, langFilters: langs ? (langs as string).split(',') : [], repoFilters: repos ? decodeURIComponent(repos as string).split(',') : [], - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log); - const res = await docSearchClient.search(searchReq); - return res; + const docSearchClient = new DocumentSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await docSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -111,9 +130,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/doc', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -121,14 +144,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: DocumentSearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log); - const res = await docSearchClient.suggest(searchReq); - return res; + const docSearchClient = new DocumentSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await docSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -143,14 +169,21 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/integration/snippets', method: 'POST', - async handler(req: RequestFacade) { - const reqs: StackTraceSnippetsRequest[] = (req.payload as any).requests; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const scopes = new Set( - await getReferenceHelper(req.getSavedObjectsClient()).findReferences() + await getReferenceHelper(context.core.savedObjects.client).findReferences() ); - return await Promise.all( + const reqs: StackTraceSnippetsRequest[] = (req.body as any).requests; + const searchRes = await Promise.all( reqs.map((stacktraceReq: StackTraceSnippetsRequest) => { - const integClient = new IntegrationsSearchClient(new EsClientWithRequest(req), log); + const integClient = new 
IntegrationsSearchClient( + new EsClientWithRequest(context, req), + log + ); return Promise.all( stacktraceReq.stacktraceItems.map((stacktrace: StackTraceItem) => { const repoUris = stacktraceReq.repoUris.filter(uri => scopes.has(uri)); @@ -166,14 +199,19 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { ); }) ); + return res.ok({ body: searchRes }); }, }); } export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { - const symbolSearchHandler = async (req: RequestFacade) => { + const symbolSearchHandler = async ( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -181,14 +219,14 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: SymbolSearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); - const res = await symbolSearchClient.suggest(searchReq); - return res; + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); + const searchRes = await symbolSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }; @@ -196,12 +234,12 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/symbol', method: 'GET', - handler: symbolSearchHandler, + npHandler: symbolSearchHandler, }); router.route({ path: '/api/code/search/symbol', method: 'GET', - handler: symbolSearchHandler, + npHandler: symbolSearchHandler, }); } @@ -209,9 +247,13 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/commit', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repos, repoScope } = req.query as RequestQueryFacade; + const { p, q, repos, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -220,21 +262,27 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) { query: q as string, page, repoFilters: repos ? 
decodeURIComponent(repos as string).split(',') : [], - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const commitSearchClient = new CommitSearchClient(new EsClientWithRequest(req), log); - const res = await commitSearchClient.search(searchReq); - return res; + const commitSearchClient = new CommitSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await commitSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); } -async function getScope(req: RequestFacade, repoScope: string | string[]): Promise { - let scope: string[] = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); +async function getScope( + context: RequestHandlerContext, + repoScope: string | string[] +): Promise { + let scope: string[] = await getReferenceHelper(context.core.savedObjects.client).findReferences(); if (typeof repoScope === 'string') { const uriSet = new Set(repoScope.split(',')); scope = scope.filter(uri => uriSet.has(uri)); diff --git a/x-pack/legacy/plugins/code/server/routes/setup.ts b/x-pack/legacy/plugins/code/server/routes/setup.ts index 58db84fd80aaf..6f89ebf35441f 100644 --- a/x-pack/legacy/plugins/code/server/routes/setup.ts +++ b/x-pack/legacy/plugins/code/server/routes/setup.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; + import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; import { SetupDefinition } from '../distributed/apis'; @@ -14,9 +15,14 @@ export function setupRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ method: 'get', path: '/api/code/setup', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const endpoint = await codeServices.locate(req, ''); - return await setupService.setup(endpoint, {}); + const setup = await setupService.setup(endpoint, {}); + return res.ok({ body: setup }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/status.ts b/x-pack/legacy/plugins/code/server/routes/status.ts index 56b2972bd4147..e2723342b49d2 100644 --- a/x-pack/legacy/plugins/code/server/routes/status.ts +++ b/x-pack/legacy/plugins/code/server/routes/status.ts @@ -4,10 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import Boom from 'boom'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { CodeServerRouter } from '../security'; -import { RequestFacade } from '../../'; import { LangServerType, RepoFileStatus, StatusReport } from '../../common/repo_file_status'; import { CTAGS, LanguageServerDefinition } from '../lsp/language_servers'; import { LanguageServerStatus } from '../../common/language_server'; @@ -108,18 +107,22 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices router.route({ path: '/api/code/repo/{uri*3}/status/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const report: StatusReport = {}; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); const endpoint = await codeServices.locate(req, uri); try { // Check if the repository already exists const repo = await repoObjectClient.getRepository(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri); } catch (e) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } await handleRepoStatus(endpoint, report, uri, ref, repoObjectClient); if (path) { @@ -141,10 +144,10 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices // not a file? The path may be a dir. } } catch (e) { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } - return report; + return res.ok({ body: report }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/workspace.ts b/x-pack/legacy/plugins/code/server/routes/workspace.ts index 8a112af297245..4dfafda7369c1 100644 --- a/x-pack/legacy/plugins/code/server/routes/workspace.ts +++ b/x-pack/legacy/plugins/code/server/routes/workspace.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import Boom from 'boom'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; -import { RequestFacade, RequestQueryFacade } from '../../'; +import { RequestQueryFacade } from '../../'; import { ServerOptions } from '../server_options'; import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; @@ -23,8 +23,12 @@ export function workspaceRoute( router.route({ path: '/api/code/workspace', method: 'GET', - async handler() { - return serverOptions.repoConfigs; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + return res.ok({ body: serverOptions.repoConfigs }); }, }); @@ -32,23 +36,35 @@ export function workspaceRoute( path: '/api/code/workspace/{uri*3}/{revision}', requireAdmin: true, method: 'POST', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; - getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const revision = req.params.revision as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri, revision } = req.params as any; + getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const repoConfig = serverOptions.repoConfigs[repoUri]; const force = !!(req.query as RequestQueryFacade).force; if (repoConfig) { const endpoint = await codeServices.locate(req, repoUri); try { await workspaceService.initCmd(endpoint, { repoUri, revision, force, repoConfig }); + return res.ok(); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); + } else { + return res.customError({ + body: e.error, + statusCode: 500, + }); } } } else { - return Boom.notFound(`repo config for ${repoUri} not found.`); + return res.notFound({ body: `repo config for ${repoUri} not found.` }); } }, }); diff --git a/x-pack/legacy/plugins/code/server/search/repository_object_client.ts b/x-pack/legacy/plugins/code/server/search/repository_object_client.ts index c7deb2faa3e7f..23ae73d35b8fc 100644 --- a/x-pack/legacy/plugins/code/server/search/repository_object_client.ts +++ b/x-pack/legacy/plugins/code/server/search/repository_object_client.ts @@ -18,7 +18,6 @@ import { RepositoryIndexName, RepositoryIndexNamePrefix, RepositoryIndexStatusReservedField, - RepositoryRandomPathReservedField, RepositoryReservedField, RepositorySearchIndexWithScope, } from '../indexer/schema'; @@ -47,14 +46,6 @@ export class RepositoryObjectClient { return await this.getRepositoryObject(repoUri, RepositoryConfigReservedField); } - public async getRepository(repoUri: RepositoryUri): Promise { - return await this.getRepositoryObject(repoUri, RepositoryReservedField); - } - - public async getRepositoryRandomStr(repoUri: RepositoryUri): Promise { - return await this.getRepositoryObject(repoUri, RepositoryRandomPathReservedField); - } - public async getRepositories(uris: string[]): Promise { if (uris.length === 0) { return []; @@ -62,6 +53,10 @@ export class RepositoryObjectClient { return this.getRepositoriesInternal(RepositorySearchIndexWithScope(uris)); } + public async getRepository(repoUri: RepositoryUri): Promise { + return await this.getRepositoryObject(repoUri, RepositoryReservedField); + } + public async getAllRepositories(): Promise { return await this.getRepositoriesInternal(`${RepositoryIndexNamePrefix}*`); } @@ -107,10 +102,6 @@ export class 
RepositoryObjectClient { return await this.setRepositoryObject(repoUri, RepositoryConfigReservedField, config); } - public async setRepositoryRandomStr(repoUri: RepositoryUri, randomStr: string) { - return await this.setRepositoryObject(repoUri, RepositoryRandomPathReservedField, randomStr); - } - public async setRepository(repoUri: RepositoryUri, repo: Repository) { return await this.setRepositoryObject(repoUri, RepositoryReservedField, repo); } diff --git a/x-pack/legacy/plugins/code/server/security.ts b/x-pack/legacy/plugins/code/server/security.ts index c548b51940599..b511fba5af4d8 100644 --- a/x-pack/legacy/plugins/code/server/security.ts +++ b/x-pack/legacy/plugins/code/server/security.ts @@ -4,27 +4,100 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade, ServerRouteFacade, RouteOptionsFacade } from '..'; +import { schema } from '@kbn/config-schema'; + +import { IRouter, RequestHandler } from 'src/core/server'; +import { ServerRouteFacade, RouteOptionsFacade } from '..'; export class CodeServerRouter { - constructor(readonly server: ServerFacade) {} + constructor(readonly router: IRouter) {} route(route: CodeRoute) { const routeOptions: RouteOptionsFacade = (route.options || {}) as RouteOptionsFacade; - routeOptions.tags = [ + const tags = [ ...(routeOptions.tags || []), `access:code_${route.requireAdmin ? 'admin' : 'user'}`, ]; - this.server.route({ - handler: route.handler, - method: route.method, - options: routeOptions, - path: route.path, - }); + const routeHandler = route.npHandler!; + + switch ((route.method as string).toLowerCase()) { + case 'get': { + this.router.get( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'put': { + this.router.put( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + body: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'delete': { + this.router.delete( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'patch': + case 'post': { + this.router.post( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + body: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + default: { + throw new Error(`Unknown HTTP method: ${route.method}`); + } + } } } export interface CodeRoute extends ServerRouteFacade { requireAdmin?: boolean; + // New Platform Route Handler API + npHandler?: RequestHandler; } diff --git a/x-pack/legacy/plugins/code/server/server_options.ts b/x-pack/legacy/plugins/code/server/server_options.ts index f39735ce1c8fc..e267e6f9146eb 100644 --- a/x-pack/legacy/plugins/code/server/server_options.ts +++ b/x-pack/legacy/plugins/code/server/server_options.ts @@ -21,7 +21,6 @@ export interface SecurityOptions { installNodeDependency: boolean; gitHostWhitelist: string[]; gitProtocolWhitelist: string[]; - enableGitCertCheck: boolean; enableJavaSecurityManager: boolean; extraJavaRepositoryWhitelist: string[]; } diff --git 
a/x-pack/legacy/plugins/code/server/test_utils.ts b/x-pack/legacy/plugins/code/server/test_utils.ts index cb5f43a0933b2..6276515fae1b6 100644 --- a/x-pack/legacy/plugins/code/server/test_utils.ts +++ b/x-pack/legacy/plugins/code/server/test_utils.ts @@ -77,7 +77,6 @@ const TEST_OPTIONS = { enableMavenImport: true, enableGradleImport: true, installNodeDependency: true, - enableGitCertCheck: true, gitProtocolWhitelist: ['ssh', 'https', 'git'], enableJavaSecurityManager: true, }, diff --git a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts index 49e27cdde62b6..9dcfb543e8306 100644 --- a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts +++ b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts @@ -4,50 +4,62 @@ * you may not use this file except in compliance with the Elastic License. */ -import { AnyObject } from '../lib/esqueue'; +import { + IndicesCreateParams, + IndicesDeleteParams, + IndicesExistsParams, + IndicesExistsAliasParams, + IndicesDeleteAliasParams, + IndicesGetAliasParams, + IndicesGetMappingParams, + IndicesPutAliasParams, + IndicesUpdateAliasesParams, + IndicesRefreshParams, +} from 'elasticsearch'; + import { WithRequest } from './with_request'; import { WithInternalRequest } from './with_internal_request'; export class EsIndexClient { constructor(readonly self: WithRequest | WithInternalRequest) {} - public exists(params: AnyObject): Promise { + public exists(params: IndicesExistsParams): Promise { return this.self.callCluster('indices.exists', params); } - public create(params: AnyObject): Promise { + public create(params: IndicesCreateParams): Promise { return this.self.callCluster('indices.create', params); } - public refresh(params: AnyObject): Promise { + public refresh(params: IndicesRefreshParams): Promise { return this.self.callCluster('indices.refresh', params); } - public delete(params: AnyObject): Promise { + public delete(params: IndicesDeleteParams): Promise { return this.self.callCluster('indices.delete', params); } - public existsAlias(params: AnyObject): Promise { + public existsAlias(params: IndicesExistsAliasParams): Promise { return this.self.callCluster('indices.existsAlias', params); } - public getAlias(params: AnyObject): Promise { + public getAlias(params: IndicesGetAliasParams): Promise { return this.self.callCluster('indices.getAlias', params); } - public putAlias(params: AnyObject): Promise { + public putAlias(params: IndicesPutAliasParams): Promise { return this.self.callCluster('indices.putAlias', params); } - public deleteAlias(params: AnyObject): Promise { + public deleteAlias(params: IndicesDeleteAliasParams): Promise { return this.self.callCluster('indices.deleteAlias', params); } - public updateAliases(params: AnyObject): Promise { + public updateAliases(params: IndicesUpdateAliasesParams): Promise { return this.self.callCluster('indices.updateAliases', params); } - public getMapping(params: AnyObject): Promise { + public getMapping(params: IndicesGetMappingParams): Promise { return this.self.callCluster('indices.getMapping', params); } } diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts index 5a2cb0952e4b6..60a57f4dd26ea 100644 --- a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts @@ -4,35 +4,46 @@ * you may not use this file except in 
compliance with the Elastic License. */ -import { ServerFacade } from '../..'; -import { AnyObject, EsClient } from '../lib/esqueue'; +import { + BulkIndexDocumentsParams, + DeleteDocumentByQueryParams, + DeleteDocumentParams, + GetParams, + IndexDocumentParams, + ReindexParams, + SearchParams, + UpdateDocumentParams, + UpdateDocumentByQueryParams, +} from 'elasticsearch'; +import { IClusterClient } from 'src/core/server'; +import { EsClient } from '../lib/esqueue'; import { EsIndexClient } from './es_index_client'; import { WithInternalRequest } from './with_internal_request'; export class EsClientWithInternalRequest extends WithInternalRequest implements EsClient { public readonly indices = new EsIndexClient(this); - constructor(server: ServerFacade) { - super(server); + constructor(cluster: IClusterClient) { + super(cluster); } - public bulk(params: AnyObject): Promise { + public bulk(params: BulkIndexDocumentsParams): Promise { return this.callCluster('bulk', params); } - public delete(params: AnyObject): Promise { + public delete(params: DeleteDocumentParams): Promise { return this.callCluster('delete', params); } - public deleteByQuery(params: AnyObject): Promise { + public deleteByQuery(params: DeleteDocumentByQueryParams): Promise { return this.callCluster('deleteByQuery', params); } - public get(params: AnyObject): Promise { + public get(params: GetParams): Promise { return this.callCluster('get', params); } - public index(params: AnyObject): Promise { + public index(params: IndexDocumentParams): Promise { return this.callCluster('index', params); } @@ -40,19 +51,19 @@ export class EsClientWithInternalRequest extends WithInternalRequest implements return this.callCluster('ping'); } - public reindex(params: AnyObject): Promise { + public reindex(params: ReindexParams): Promise { return this.callCluster('reindex', params); } - public search(params: AnyObject): Promise { + public search(params: SearchParams): Promise { return this.callCluster('search', params); } - public update(params: AnyObject): Promise { + public update(params: UpdateDocumentParams): Promise { return this.callCluster('update', params); } - public updateByQuery(params: AnyObject): Promise { + public updateByQuery(params: UpdateDocumentByQueryParams): Promise { return this.callCluster('updateByQuery', params); } } diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts index a1f70db0a7074..2e4a18937a232 100644 --- a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { RequestFacade } from '../../'; +import { KibanaRequest, RequestHandlerContext } from 'src/core/server'; import { AnyObject, EsClient } from '../lib/esqueue'; import { EsIndexClient } from './es_index_client'; import { WithRequest } from './with_request'; @@ -12,8 +12,8 @@ import { WithRequest } from './with_request'; export class EsClientWithRequest extends WithRequest implements EsClient { public readonly indices = new EsIndexClient(this); - constructor(readonly req: RequestFacade) { - super(req); + constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) { + super(context, req); } public bulk(params: AnyObject): Promise { diff --git a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts index a51fa990ff10e..9f8dde129039a 100644 --- a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts @@ -4,14 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade } from '../..'; -import { AnyObject } from '../lib/esqueue'; +import { APICaller, IClusterClient } from 'src/core/server'; export class WithInternalRequest { - public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise; + public readonly callCluster: APICaller; - constructor(server: ServerFacade) { - const cluster = server.plugins.elasticsearch.getCluster('admin'); - this.callCluster = cluster.callWithInternalUser; + constructor(cluster: IClusterClient) { + this.callCluster = cluster.callAsInternalUser; } } diff --git a/x-pack/legacy/plugins/code/server/utils/with_request.ts b/x-pack/legacy/plugins/code/server/utils/with_request.ts index e08b9727f375e..e2a4bfd03de66 100644 --- a/x-pack/legacy/plugins/code/server/utils/with_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/with_request.ts @@ -4,24 +4,20 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestFacade } from '../../'; -import { AnyObject } from '../lib/esqueue'; +import { APICaller, KibanaRequest, RequestHandlerContext } from 'src/core/server'; export class WithRequest { - public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise; + public readonly callCluster: APICaller; - constructor(readonly req: RequestFacade) { - const cluster = req.server.plugins.elasticsearch.getCluster('data'); - - // @ts-ignore - const securityPlugin = req.server.plugins.security; - if (securityPlugin) { - const useRbac = securityPlugin.authorization.mode.useRbacForRequest(req); - if (useRbac) { - this.callCluster = cluster.callWithInternalUser; - return; - } - } - this.callCluster = cluster.callWithRequest.bind(null, req); + constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) { + const securityPlugin = context.code.legacy.securityPlugin; + const useRbac = + securityPlugin && + securityPlugin.authorization && + // @ts-ignore + securityPlugin.authorization.mode.useRbacForRequest(req); + this.callCluster = useRbac + ? 
context.core.elasticsearch.dataClient.callAsInternalUser + : context.core.elasticsearch.dataClient.callAsCurrentUser; } } diff --git a/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.pause_auto_follow_pattern.json b/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.pause_auto_follow_pattern.json new file mode 100644 index 0000000000000..84fa19b2f0131 --- /dev/null +++ b/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.pause_auto_follow_pattern.json @@ -0,0 +1,11 @@ +{ + "ccr.pause_auto_follow_pattern": { + "methods": [ + "POST" + ], + "patterns": [ + "_ccr/auto_follow/{name}/pause" + ], + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-pause-auto-follow-pattern.html" + } +} diff --git a/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.resume_auto_follow_pattern.json b/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.resume_auto_follow_pattern.json new file mode 100644 index 0000000000000..397cd826e50ac --- /dev/null +++ b/x-pack/legacy/plugins/console_extensions/spec/generated/ccr.resume_auto_follow_pattern.json @@ -0,0 +1,11 @@ +{ + "ccr.resume_auto_follow_pattern": { + "methods": [ + "POST" + ], + "patterns": [ + "_ccr/auto_follow/{name}/resume" + ], + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-resume-auto-follow-pattern.html" + } +} diff --git a/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.test.ts b/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.test.ts index 392f945ab00c5..703ba64b95a7c 100644 --- a/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.test.ts +++ b/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.test.ts @@ -318,6 +318,194 @@ describe('#bulkCreate', () => { }); }); +describe('#bulkUpdate', () => { + it('redirects request to underlying base client if type is not registered', async () => { + const attributes = { attrOne: 'one', attrSecret: 'secret', attrThree: 'three' }; + const mockedResponse = { + saved_objects: [{ id: 'some-id', type: 'unknown-type', attributes, references: [] }], + }; + + mockBaseClient.bulkUpdate.mockResolvedValue(mockedResponse); + + await expect( + wrapper.bulkUpdate( + [{ type: 'unknown-type', id: 'some-id', attributes, version: 'some-version' }], + {} + ) + ).resolves.toEqual(mockedResponse); + + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledWith( + [{ type: 'unknown-type', id: 'some-id', attributes, version: 'some-version' }], + {} + ); + }); + + it('encrypts attributes and strips them from response', async () => { + const docs = [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrSecret: 'secret', + attrThree: 'three', + }, + }, + { + id: 'some-id-2', + type: 'known-type', + attributes: { + attrOne: 'one 2', + attrSecret: 'secret 2', + attrThree: 'three 2', + }, + }, + ]; + + const mockedResponse = { + saved_objects: docs.map(doc => ({ ...doc, references: undefined })), + }; + + mockBaseClient.bulkUpdate.mockResolvedValue(mockedResponse); + + await expect(wrapper.bulkUpdate(docs.map(doc => ({ ...doc })), {})).resolves.toEqual({ + saved_objects: [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrThree: 'three', + }, + }, + { + id: 'some-id-2', + type: 'known-type', + attributes: { + attrOne: 
'one 2', + attrThree: 'three 2', + }, + }, + ], + }); + + expect(encryptedSavedObjectsServiceMock.encryptAttributes).toHaveBeenCalledTimes(2); + expect(encryptedSavedObjectsServiceMock.encryptAttributes).toHaveBeenCalledWith( + { type: 'known-type', id: 'some-id' }, + { attrOne: 'one', attrSecret: 'secret', attrThree: 'three' } + ); + expect(encryptedSavedObjectsServiceMock.encryptAttributes).toHaveBeenCalledWith( + { type: 'known-type', id: 'some-id-2' }, + { attrOne: 'one 2', attrSecret: 'secret 2', attrThree: 'three 2' } + ); + + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledWith( + [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrSecret: '*secret*', + attrThree: 'three', + }, + }, + { + id: 'some-id-2', + type: 'known-type', + attributes: { + attrOne: 'one 2', + attrSecret: '*secret 2*', + attrThree: 'three 2', + }, + }, + ], + {} + ); + }); + + it('uses `namespace` to encrypt attributes if it is specified', async () => { + const docs = [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrSecret: 'secret', + attrThree: 'three', + }, + version: 'some-version', + }, + ]; + + mockBaseClient.bulkUpdate.mockResolvedValue({ + saved_objects: docs.map(doc => ({ ...doc, references: undefined })), + }); + + await expect(wrapper.bulkUpdate(docs, { namespace: 'some-namespace' })).resolves.toEqual({ + saved_objects: [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrThree: 'three', + }, + version: 'some-version', + references: undefined, + }, + ], + }); + + expect(encryptedSavedObjectsServiceMock.encryptAttributes).toHaveBeenCalledTimes(1); + expect(encryptedSavedObjectsServiceMock.encryptAttributes).toHaveBeenCalledWith( + { type: 'known-type', id: 'some-id', namespace: 'some-namespace' }, + { attrOne: 'one', attrSecret: 'secret', attrThree: 'three' } + ); + + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledWith( + [ + { + id: 'some-id', + type: 'known-type', + attributes: { + attrOne: 'one', + attrSecret: '*secret*', + attrThree: 'three', + }, + version: 'some-version', + + references: undefined, + }, + ], + { namespace: 'some-namespace' } + ); + }); + + it('fails if base client fails', async () => { + const attributes = { attrOne: 'one', attrSecret: 'secret', attrThree: 'three' }; + + const failureReason = new Error('Something bad happened...'); + mockBaseClient.bulkUpdate.mockRejectedValue(failureReason); + + await expect( + wrapper.bulkUpdate( + [{ type: 'unknown-type', id: 'some-id', attributes, version: 'some-version' }], + {} + ) + ).rejects.toThrowError(failureReason); + + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockBaseClient.bulkUpdate).toHaveBeenCalledWith( + [{ type: 'unknown-type', id: 'some-id', attributes, version: 'some-version' }], + {} + ); + }); +}); + describe('#delete', () => { it('redirects request to underlying base client if type is not registered', async () => { const options = { namespace: 'some-ns' }; diff --git a/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.ts b/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.ts index a18e691ae1a1f..7fa066aa3355c 100644 --- a/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.ts +++ 
b/x-pack/legacy/plugins/encrypted_saved_objects/server/lib/encrypted_saved_objects_client_wrapper.ts @@ -11,7 +11,9 @@ import { SavedObjectsBaseOptions, SavedObjectsBulkCreateObject, SavedObjectsBulkGetObject, + SavedObjectsBulkUpdateObject, SavedObjectsBulkResponse, + SavedObjectsBulkUpdateResponse, SavedObjectsClientContract, SavedObjectsCreateOptions, SavedObjectsFindOptions, @@ -110,6 +112,34 @@ export class EncryptedSavedObjectsClientWrapper implements SavedObjectsClientCon ); } + public async bulkUpdate( + objects: SavedObjectsBulkUpdateObject[], + options?: SavedObjectsBaseOptions + ) { + // We encrypt attributes for every object in parallel and that can potentially exhaust libuv or + // NodeJS thread pool. If it turns out to be a problem, we can consider switching to the + // sequential processing. + const encryptedObjects = await Promise.all( + objects.map(async object => { + const { type, id, attributes } = object; + if (!this.options.service.isRegistered(type)) { + return object; + } + return { + ...object, + attributes: await this.options.service.encryptAttributes( + { type, id, namespace: options && options.namespace }, + attributes + ), + }; + }) + ); + + return this.stripEncryptedAttributesFromBulkResponse( + await this.options.baseClient.bulkUpdate(encryptedObjects, options) + ); + } + public async delete(type: string, id: string, options?: SavedObjectsBaseOptions) { return await this.options.baseClient.delete(type, id, options); } @@ -182,7 +212,7 @@ export class EncryptedSavedObjectsClientWrapper implements SavedObjectsClientCon * @param response Raw response returned by the underlying base client. */ private stripEncryptedAttributesFromBulkResponse< - T extends SavedObjectsBulkResponse | SavedObjectsFindResponse + T extends SavedObjectsBulkResponse | SavedObjectsFindResponse | SavedObjectsBulkUpdateResponse >(response: T): T { for (const savedObject of response.saved_objects) { if (this.options.service.isRegistered(savedObject.type)) { diff --git a/x-pack/legacy/plugins/graph/public/app.js b/x-pack/legacy/plugins/graph/public/app.js index 7767721357ba6..e6bfc82f876c4 100644 --- a/x-pack/legacy/plugins/graph/public/app.js +++ b/x-pack/legacy/plugins/graph/public/app.js @@ -352,7 +352,7 @@ app.controller('graphuiPlugin', function ( } }; - function canWipeWorkspace(callback) { + function canWipeWorkspace(callback, text, options) { if (!hasFieldsSelector(store.getState())) { callback(); return; @@ -360,14 +360,15 @@ app.controller('graphuiPlugin', function ( const confirmModalOptions = { onConfirm: callback, onCancel: (() => {}), - confirmButtonText: i18n.translate('xpack.graph.clearWorkspace.confirmButtonLabel', { + confirmButtonText: i18n.translate('xpack.graph.leaveWorkspace.confirmButtonLabel', { defaultMessage: 'Leave anyway', }), - title: i18n.translate('xpack.graph.clearWorkspace.modalTitle', { + title: i18n.translate('xpack.graph.leaveWorkspace.modalTitle', { defaultMessage: 'Unsaved changes', }), + ...options, }; - confirmModal(i18n.translate('xpack.graph.clearWorkspace.confirmText', { + confirmModal(text || i18n.translate('xpack.graph.leaveWorkspace.confirmText', { defaultMessage: 'If you leave now, you will lose unsaved changes.', }), confirmModalOptions); } diff --git a/x-pack/legacy/plugins/graph/public/components/search_bar.tsx b/x-pack/legacy/plugins/graph/public/components/search_bar.tsx index 4fd1a162105f1..d8576c02d29c9 100644 --- a/x-pack/legacy/plugins/graph/public/components/search_bar.tsx +++ 
b/x-pack/legacy/plugins/graph/public/components/search_bar.tsx @@ -33,7 +33,11 @@ export interface OuterSearchBarProps { initialQuery?: string; onQuerySubmit: (query: string) => void; - confirmWipeWorkspace: (onConfirm: () => void) => void; + confirmWipeWorkspace: ( + onConfirm: () => void, + text?: string, + options?: { confirmButtonText: string; title: string } + ) => void; indexPatternProvider: IndexPatternProvider; } @@ -118,11 +122,27 @@ export function SearchBarComponent(props: SearchBarProps) { className="gphSearchBar__datasourceButton" data-test-subj="graphDatasourceButton" onClick={() => { - confirmWipeWorkspace(() => - openSourceModal( - { overlays, savedObjects, uiSettings }, - onIndexPatternSelected - ) + confirmWipeWorkspace( + () => + openSourceModal( + { overlays, savedObjects, uiSettings }, + onIndexPatternSelected + ), + i18n.translate('xpack.graph.clearWorkspace.confirmText', { + defaultMessage: + 'If you change data sources, your current fields and vertices will be reset.', + }), + { + confirmButtonText: i18n.translate( + 'xpack.graph.clearWorkspace.confirmButtonLabel', + { + defaultMessage: 'Change data source', + } + ), + title: i18n.translate('xpack.graph.clearWorkspace.modalTitle', { + defaultMessage: 'Unsaved changes', + }), + } ); }} > diff --git a/x-pack/legacy/plugins/index_management/public/sections/home/index_list/index_table/index_table.js b/x-pack/legacy/plugins/index_management/public/sections/home/index_list/index_table/index_table.js index f0f7a3d010f7f..0c3d998942b2b 100644 --- a/x-pack/legacy/plugins/index_management/public/sections/home/index_list/index_table/index_table.js +++ b/x-pack/legacy/plugins/index_management/public/sections/home/index_list/index_table/index_table.js @@ -337,6 +337,7 @@ export class IndexTable extends Component { const { name } = index; return ( toggleChanged(name, event.target.checked)} label={label} diff --git a/x-pack/legacy/plugins/infra/public/components/formatted_time.tsx b/x-pack/legacy/plugins/infra/public/components/formatted_time.tsx index f6a6545920fc5..78255c55df124 100644 --- a/x-pack/legacy/plugins/infra/public/components/formatted_time.tsx +++ b/x-pack/legacy/plugins/infra/public/components/formatted_time.tsx @@ -17,8 +17,25 @@ const getFormattedTime = ( return userFormat ? moment(time).format(userFormat) : moment(time).format(fallbackFormat); }; -export const useFormattedTime = (time: number, fallbackFormat?: string) => { - const [dateFormat] = useKibanaUiSetting('dateFormat'); +interface UseFormattedTimeOptions { + format?: 'dateTime' | 'time'; + fallbackFormat?: string; +} + +export const useFormattedTime = ( + time: number, + { format = 'dateTime', fallbackFormat }: UseFormattedTimeOptions = {} +) => { + // `dateFormat:scaled` is an array of `[key, format]` tuples. + // The hook might return `undefined`, so use a sane default for the `find` later. 
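  // For illustration only (the concrete values are an assumption, not taken from this diff):
  // the `dateFormat:scaled` advanced setting typically resolves to tuples such as
  //   [['', 'HH:mm:ss.SSS'], ['PT1S', 'HH:mm:ss'], ['PT1M', 'HH:mm'], ['P1DT', 'YYYY-MM-DD']]
  // The `find` below picks the entry keyed by '', i.e. the most fine-grained format.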
+ const scaledTuples = useKibanaUiSetting('dateFormat:scaled')[0] || [['', undefined]]; + + const formatMap = { + dateTime: useKibanaUiSetting('dateFormat')[0], + time: scaledTuples.find(([key]: [string, string]) => key === '')[1], + }; + + const dateFormat = formatMap[format]; const formattedTime = useMemo(() => getFormattedTime(time, dateFormat, fallbackFormat), [ getFormattedTime, time, diff --git a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/column_headers.tsx b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/column_headers.tsx index 3d78c8d728fc6..56a84d258c907 100644 --- a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/column_headers.tsx +++ b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/column_headers.tsx @@ -5,6 +5,7 @@ */ import React from 'react'; +import { transparentize } from 'polished'; import euiStyled from '../../../../../../common/eui_styled_components'; import { @@ -20,6 +21,8 @@ import { LogEntryColumnWidths, } from './log_entry_column'; import { ASSUMED_SCROLLBAR_WIDTH } from './vertical_scroll_panel'; +import { WithLogPosition } from '../../../containers/logs/with_log_position'; +import { localizedDate } from '../../../utils/formatters/datetime'; export const LogColumnHeaders: React.FunctionComponent<{ columnConfigurations: LogColumnConfiguration[]; @@ -30,13 +33,16 @@ export const LogColumnHeaders: React.FunctionComponent<{ {columnConfigurations.map(columnConfiguration => { if (isTimestampLogColumnConfiguration(columnConfiguration)) { return ( - - Timestamp - + + {({ firstVisiblePosition }) => ( + + {firstVisiblePosition ? localizedDate(firstVisiblePosition.time) : 'Timestamp'} + + )} + ); } else if (isMessageLogColumnConfiguration(columnConfiguration)) { return ( @@ -83,13 +89,16 @@ const LogColumnHeadersWrapper = euiStyled.div.attrs({ justify-content: flex-start; overflow: hidden; padding-right: ${ASSUMED_SCROLLBAR_WIDTH}px; + border-bottom: ${props => props.theme.eui.euiBorderThin}; + box-shadow: 0 2px 2px -1px ${props => transparentize(0.3, props.theme.eui.euiColorLightShade)}; + position: relative; + z-index: 1; `; const LogColumnHeaderWrapper = LogEntryColumn.extend.attrs({ role: 'columnheader', })` align-items: center; - border-bottom: ${props => props.theme.eui.euiBorderThick}; display: flex; flex-direction: row; height: 32px; diff --git a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_date_row.tsx b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_date_row.tsx new file mode 100644 index 0000000000000..fbc450950b828 --- /dev/null +++ b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_date_row.tsx @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +import React from 'react'; +import { EuiFlexGroup, EuiFlexItem, EuiHorizontalRule, EuiTitle } from '@elastic/eui'; +import { localizedDate } from '../../../utils/formatters/datetime'; + +interface LogDateRowProps { + timestamp: number; +} + +/** + * Show a row with the date in the log stream + */ +export const LogDateRow: React.FC = ({ timestamp }) => { + const formattedDate = localizedDate(timestamp); + + return ( + + + +

+    <EuiFlexGroup alignItems="center" gutterSize="s">
+      <EuiFlexItem grow={false}>
+        <EuiTitle size="xs">
+          <h2>{formattedDate}</h2>
+        </EuiTitle>
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <EuiHorizontalRule />
+      </EuiFlexItem>
+    </EuiFlexGroup>
+ ); +}; diff --git a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_entry_timestamp_column.tsx b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_entry_timestamp_column.tsx index c996342d0c060..884e5ff0a5bde 100644 --- a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_entry_timestamp_column.tsx +++ b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/log_entry_timestamp_column.tsx @@ -19,7 +19,7 @@ interface LogEntryTimestampColumnProps { export const LogEntryTimestampColumn = memo( ({ isHighlighted, isHovered, time }) => { - const formattedTime = useFormattedTime(time); + const formattedTime = useFormattedTime(time, { format: 'time' }); return ( @@ -45,11 +45,8 @@ const TimestampColumnContent = LogEntryColumnContent.extend.attrs<{ isHovered: boolean; isHighlighted: boolean; }>({})` - background-color: ${props => props.theme.eui.euiColorLightestShade}; - border-right: solid 2px ${props => props.theme.eui.euiColorLightShade}; color: ${props => props.theme.eui.euiColorDarkShade}; overflow: hidden; - text-align: right; text-overflow: clip; white-space: pre; diff --git a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/scrollable_log_text_stream_view.tsx b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/scrollable_log_text_stream_view.tsx index 3c1fb2ceec7f6..d439308194d18 100644 --- a/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/scrollable_log_text_stream_view.tsx +++ b/x-pack/legacy/plugins/infra/public/components/logging/log_text_stream/scrollable_log_text_stream_view.tsx @@ -6,7 +6,8 @@ import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; -import React, { useMemo } from 'react'; +import React, { Fragment, useMemo } from 'react'; +import moment from 'moment'; import euiStyled from '../../../../../../common/eui_styled_components'; import { TextScale } from '../../../../common/log_text_scale'; @@ -26,6 +27,7 @@ import { MeasurableItemView } from './measurable_item_view'; import { VerticalScrollPanel } from './vertical_scroll_panel'; import { getColumnWidths, LogEntryColumnWidths } from './log_entry_column'; import { useMeasuredCharacterDimensions } from './text_styles'; +import { LogDateRow } from './log_date_row'; interface ScrollableLogTextStreamViewProps { columnConfigurations: LogColumnConfiguration[]; @@ -188,35 +190,47 @@ export class ScrollableLogTextStreamView extends React.PureComponent< isStreaming={false} lastStreamingUpdate={null} /> - {items.map(item => ( - - {itemMeasureRef => ( - - )} - - ))} + {items.map((item, idx) => { + const currentTimestamp = item.logEntry.key.time; + let showDate = false; + + if (idx > 0) { + const prevTimestamp = items[idx - 1].logEntry.key.time; + showDate = !moment(currentTimestamp).isSame(prevTimestamp, 'day'); + } + + return ( + + {showDate && } + + {itemMeasureRef => ( + + )} + + + ); + })} = ({ children, columnConfigurations, scale }) => { const { CharacterDimensionsProbe, dimensions } = useMeasuredCharacterDimensions(scale); const referenceTime = useMemo(() => Date.now(), []); - const formattedCurrentDate = useFormattedTime(referenceTime); + const formattedCurrentDate = useFormattedTime(referenceTime, { format: 'time' }); const columnWidths = useMemo( () => getColumnWidths(columnConfigurations, dimensions.width, formattedCurrentDate.length), [columnConfigurations, dimensions.width, formattedCurrentDate] diff --git 
a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx index b1eef34001750..fe3c930f9e08e 100644 --- a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx +++ b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx @@ -9,11 +9,12 @@ import React from 'react'; import euiStyled from '../../../../../common/eui_styled_components'; interface AppNavigationProps { + 'aria-label': string; children: React.ReactNode; } -export const AppNavigation = ({ children }: AppNavigationProps) => ( -
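The bulkUpdate addition to encrypted_saved_objects_client_wrapper.ts above encrypts every object in parallel and notes that sequential processing is a possible fallback if the crypto work ever exhausts the libuv/Node.js thread pool. A minimal sketch of that fallback follows; the local interfaces and the helper name encryptSequentially are assumptions paraphrased from the hunk, not part of this change.

// Sketch only: encrypt bulk-update objects one at a time instead of via Promise.all,
// trading a little latency for lower peak concurrency on the crypto work.
interface BulkUpdateObject {
  type: string;
  id: string;
  attributes: Record<string, unknown>;
  version?: string;
}

interface EncryptionService {
  isRegistered(type: string): boolean;
  encryptAttributes(
    descriptor: { type: string; id: string; namespace?: string },
    attributes: Record<string, unknown>
  ): Promise<Record<string, unknown>>;
}

async function encryptSequentially(
  objects: BulkUpdateObject[],
  service: EncryptionService,
  namespace?: string
): Promise<BulkUpdateObject[]> {
  const encrypted: BulkUpdateObject[] = [];
  for (const object of objects) {
    // Unregistered types are passed through untouched, mirroring the parallel version.
    if (!service.isRegistered(object.type)) {
      encrypted.push(object);
      continue;
    }
    const { type, id, attributes } = object;
    encrypted.push({
      ...object,
      // Awaiting inside the loop keeps only one encryption in flight at a time.
      attributes: await service.encryptAttributes({ type, id, namespace }, attributes),
    });
  }
  return encrypted;
}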