= {}, options?: CallAPIOptions) => {
return defer(() => apiCaller(endpoint, clientParams, options))
.pipe(
diff --git a/src/core/server/http/http_server.mocks.ts b/src/core/server/http/http_server.mocks.ts
index 0a9541393284e..741c723ca9365 100644
--- a/src/core/server/http/http_server.mocks.ts
+++ b/src/core/server/http/http_server.mocks.ts
@@ -29,6 +29,7 @@ import {
RouteMethod,
KibanaResponseFactory,
RouteValidationSpec,
+ KibanaRouteState,
} from './router';
import { OnPreResponseToolkit } from './lifecycle/on_pre_response';
import { OnPostAuthToolkit } from './lifecycle/on_post_auth';
@@ -43,6 +44,7 @@ interface RequestFixtureOptions {
method?: RouteMethod;
socket?: Socket;
routeTags?: string[];
+ kibanaRouteState?: KibanaRouteState;
routeAuthRequired?: false;
validation?: {
params?: RouteValidationSpec<any>;
@@ -62,6 +64,7 @@ function createKibanaRequestMock({
routeTags,
routeAuthRequired,
validation = {},
+ kibanaRouteState = { xsrfRequired: true },
}: RequestFixtureOptions = {}) {
const queryString = stringify(query, { sort: false });
@@ -80,7 +83,7 @@ function createKibanaRequestMock({
search: queryString ? `?${queryString}` : queryString,
},
route: {
- settings: { tags: routeTags, auth: routeAuthRequired },
+ settings: { tags: routeTags, auth: routeAuthRequired, app: kibanaRouteState },
},
raw: {
req: { socket },
@@ -109,6 +112,7 @@ function createRawRequestMock(customization: DeepPartial<Request> = {}) {
return merge(
{},
{
+ app: { xsrfRequired: true } as any,
headers: {},
path: '/',
route: { settings: {} },
diff --git a/src/core/server/http/http_server.test.ts b/src/core/server/http/http_server.test.ts
index a9fc80c86d878..27db79bb94d25 100644
--- a/src/core/server/http/http_server.test.ts
+++ b/src/core/server/http/http_server.test.ts
@@ -811,6 +811,7 @@ test('exposes route details of incoming request to a route handler', async () =>
path: '/',
options: {
authRequired: true,
+ xsrfRequired: false,
tags: [],
},
});
@@ -923,6 +924,7 @@ test('exposes route details of incoming request to a route handler (POST + paylo
path: '/',
options: {
authRequired: true,
+ xsrfRequired: true,
tags: [],
body: {
parse: true, // hapi populates the default
diff --git a/src/core/server/http/http_server.ts b/src/core/server/http/http_server.ts
index 025ab2bf56ac2..cffdffab0d0cf 100644
--- a/src/core/server/http/http_server.ts
+++ b/src/core/server/http/http_server.ts
@@ -27,7 +27,7 @@ import { adoptToHapiOnPostAuthFormat, OnPostAuthHandler } from './lifecycle/on_p
import { adoptToHapiOnPreAuthFormat, OnPreAuthHandler } from './lifecycle/on_pre_auth';
import { adoptToHapiOnPreResponseFormat, OnPreResponseHandler } from './lifecycle/on_pre_response';
-import { IRouter } from './router';
+import { IRouter, KibanaRouteState, isSafeMethod } from './router';
import {
SessionStorageCookieOptions,
createCookieSessionStorageFactory,
@@ -147,9 +147,14 @@ export class HttpServer {
for (const route of router.getRoutes()) {
this.log.debug(`registering route handler for [${route.path}]`);
// Hapi does not allow payload validation to be specified for 'head' or 'get' requests
- const validate = ['head', 'get'].includes(route.method) ? undefined : { payload: true };
+ const validate = isSafeMethod(route.method) ? undefined : { payload: true };
const { authRequired = true, tags, body = {} } = route.options;
const { accepts: allow, maxBytes, output, parse } = body;
+
+ const kibanaRouteState: KibanaRouteState = {
+ xsrfRequired: route.options.xsrfRequired ?? !isSafeMethod(route.method),
+ };
+
this.server.route({
handler: route.handler,
method: route.method,
@@ -157,6 +162,7 @@ export class HttpServer {
options: {
// Enforcing the comparison with true because plugins could overwrite the auth strategy by doing `options: { authRequired: authStrategy as any }`
auth: authRequired === true ? undefined : false,
+ app: kibanaRouteState,
tags: tags ? Array.from(tags) : undefined,
// TODO: This 'validate' section can be removed once the legacy platform is completely removed.
// We are telling Hapi that NP routes can accept any payload, so that it can bypass the default
diff --git a/src/core/server/http/index.ts b/src/core/server/http/index.ts
index d31afe1670e41..8f4c02680f8a3 100644
--- a/src/core/server/http/index.ts
+++ b/src/core/server/http/index.ts
@@ -58,6 +58,8 @@ export {
RouteValidationError,
RouteValidatorFullConfig,
RouteValidationResultFactory,
+ DestructiveRouteMethod,
+ SafeRouteMethod,
} from './router';
export { BasePathProxyServer } from './base_path_proxy_server';
export { OnPreAuthHandler, OnPreAuthToolkit } from './lifecycle/on_pre_auth';
diff --git a/src/core/server/http/integration_tests/lifecycle_handlers.test.ts b/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
index f4c5f16870c7e..b5364c616f17c 100644
--- a/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
+++ b/src/core/server/http/integration_tests/lifecycle_handlers.test.ts
@@ -36,6 +36,7 @@ const versionHeader = 'kbn-version';
const xsrfHeader = 'kbn-xsrf';
const nameHeader = 'kbn-name';
const whitelistedTestPath = '/xsrf/test/route/whitelisted';
+const xsrfDisabledTestPath = '/xsrf/test/route/disabled';
const kibanaName = 'my-kibana-name';
const setupDeps = {
context: contextServiceMock.createSetupContract(),
@@ -188,6 +189,12 @@ describe('core lifecycle handlers', () => {
return res.ok({ body: 'ok' });
}
);
+ ((router as any)[method.toLowerCase()] as RouteRegistrar)(
+ { path: xsrfDisabledTestPath, validate: false, options: { xsrfRequired: false } },
+ (context, req, res) => {
+ return res.ok({ body: 'ok' });
+ }
+ );
});
await server.start();
@@ -235,6 +242,10 @@ describe('core lifecycle handlers', () => {
it('accepts whitelisted requests without either an xsrf or version header', async () => {
await getSupertest(method.toLowerCase(), whitelistedTestPath).expect(200, 'ok');
});
+
+ it('accepts requests on a route with disabled xsrf protection', async () => {
+ await getSupertest(method.toLowerCase(), xsrfDisabledTestPath).expect(200, 'ok');
+ });
});
});
});
diff --git a/src/core/server/http/lifecycle_handlers.test.ts b/src/core/server/http/lifecycle_handlers.test.ts
index 48a6973b741ba..a80e432e0d4cb 100644
--- a/src/core/server/http/lifecycle_handlers.test.ts
+++ b/src/core/server/http/lifecycle_handlers.test.ts
@@ -24,7 +24,7 @@ import {
} from './lifecycle_handlers';
import { httpServerMock } from './http_server.mocks';
import { HttpConfig } from './http_config';
-import { KibanaRequest, RouteMethod } from './router';
+import { KibanaRequest, RouteMethod, KibanaRouteState } from './router';
const createConfig = (partial: Partial<HttpConfig>): HttpConfig => partial as HttpConfig;
@@ -32,12 +32,14 @@ const forgeRequest = ({
headers = {},
path = '/',
method = 'get',
+ kibanaRouteState,
}: Partial<{
headers: Record<string, string>;
path: string;
method: RouteMethod;
+ kibanaRouteState: KibanaRouteState;
}>): KibanaRequest => {
- return httpServerMock.createKibanaRequest({ headers, path, method });
+ return httpServerMock.createKibanaRequest({ headers, path, method, kibanaRouteState });
};
describe('xsrf post-auth handler', () => {
@@ -142,6 +144,29 @@ describe('xsrf post-auth handler', () => {
expect(toolkit.next).toHaveBeenCalledTimes(1);
expect(result).toEqual('next');
});
+
+ it('accepts requests if xsrf protection on a route is disabled', () => {
+ const config = createConfig({
+ xsrf: { whitelist: [], disableProtection: false },
+ });
+ const handler = createXsrfPostAuthHandler(config);
+ const request = forgeRequest({
+ method: 'post',
+ headers: {},
+ path: '/some-path',
+ kibanaRouteState: {
+ xsrfRequired: false,
+ },
+ });
+
+ toolkit.next.mockReturnValue('next' as any);
+
+ const result = handler(request, responseFactory, toolkit);
+
+ expect(responseFactory.badRequest).not.toHaveBeenCalled();
+ expect(toolkit.next).toHaveBeenCalledTimes(1);
+ expect(result).toEqual('next');
+ });
});
});
diff --git a/src/core/server/http/lifecycle_handlers.ts b/src/core/server/http/lifecycle_handlers.ts
index ee877ee031a2b..7ef7e86326039 100644
--- a/src/core/server/http/lifecycle_handlers.ts
+++ b/src/core/server/http/lifecycle_handlers.ts
@@ -20,6 +20,7 @@
import { OnPostAuthHandler } from './lifecycle/on_post_auth';
import { OnPreResponseHandler } from './lifecycle/on_pre_response';
import { HttpConfig } from './http_config';
+import { isSafeMethod } from './router';
import { Env } from '../config';
import { LifecycleRegistrar } from './http_server';
@@ -31,15 +32,18 @@ export const createXsrfPostAuthHandler = (config: HttpConfig): OnPostAuthHandler
const { whitelist, disableProtection } = config.xsrf;
return (request, response, toolkit) => {
- if (disableProtection || whitelist.includes(request.route.path)) {
+ if (
+ disableProtection ||
+ whitelist.includes(request.route.path) ||
+ request.route.options.xsrfRequired === false
+ ) {
return toolkit.next();
}
- const isSafeMethod = request.route.method === 'get' || request.route.method === 'head';
const hasVersionHeader = VERSION_HEADER in request.headers;
const hasXsrfHeader = XSRF_HEADER in request.headers;
- if (!isSafeMethod && !hasVersionHeader && !hasXsrfHeader) {
+ if (!isSafeMethod(request.route.method) && !hasVersionHeader && !hasXsrfHeader) {
return response.badRequest({ body: `Request must contain a ${XSRF_HEADER} header.` });
}
diff --git a/src/core/server/http/router/index.ts b/src/core/server/http/router/index.ts
index 32663d1513f36..d254f391ca5e4 100644
--- a/src/core/server/http/router/index.ts
+++ b/src/core/server/http/router/index.ts
@@ -24,16 +24,20 @@ export {
KibanaRequestEvents,
KibanaRequestRoute,
KibanaRequestRouteOptions,
+ KibanaRouteState,
isRealRequest,
LegacyRequest,
ensureRawRequest,
} from './request';
export {
+ DestructiveRouteMethod,
+ isSafeMethod,
RouteMethod,
RouteConfig,
RouteConfigOptions,
RouteContentType,
RouteConfigOptionsBody,
+ SafeRouteMethod,
validBodyOutput,
} from './route';
export { HapiResponseAdapter } from './response_adapter';
diff --git a/src/core/server/http/router/request.ts b/src/core/server/http/router/request.ts
index 703571ba53c0a..bb2db6367f701 100644
--- a/src/core/server/http/router/request.ts
+++ b/src/core/server/http/router/request.ts
@@ -18,18 +18,24 @@
*/
import { Url } from 'url';
-import { Request } from 'hapi';
+import { Request, ApplicationState } from 'hapi';
import { Observable, fromEvent, merge } from 'rxjs';
import { shareReplay, first, takeUntil } from 'rxjs/operators';
import { deepFreeze, RecursiveReadonly } from '../../../utils';
import { Headers } from './headers';
-import { RouteMethod, RouteConfigOptions, validBodyOutput } from './route';
+import { RouteMethod, RouteConfigOptions, validBodyOutput, isSafeMethod } from './route';
import { KibanaSocket, IKibanaSocket } from './socket';
import { RouteValidator, RouteValidatorFullConfig } from './validator';
const requestSymbol = Symbol('request');
+/**
+ * @internal
+ */
+export interface KibanaRouteState extends ApplicationState {
+ xsrfRequired: boolean;
+}
/**
* Route options: If 'GET' or 'OPTIONS' method, body options won't be returned.
* @public
@@ -184,8 +190,10 @@ export class KibanaRequest<
const options = ({
authRequired: request.route.settings.auth !== false,
+ // some places in LP call KibanaRequest.from(request) manually. remove fallback to true before v8
+ xsrfRequired: (request.route.settings.app as KibanaRouteState)?.xsrfRequired ?? true,
tags: request.route.settings.tags || [],
- body: ['get', 'options'].includes(method)
+ body: isSafeMethod(method)
? undefined
: {
parse,
diff --git a/src/core/server/http/router/route.ts b/src/core/server/http/router/route.ts
index 4439a80b1eac7..d1458ef4ad063 100644
--- a/src/core/server/http/router/route.ts
+++ b/src/core/server/http/router/route.ts
@@ -19,11 +19,27 @@
import { RouteValidatorFullConfig } from './validator';
+export function isSafeMethod(method: RouteMethod): method is SafeRouteMethod {
+ return method === 'get' || method === 'options';
+}
+
+/**
+ * Set of HTTP methods changing the state of the server.
+ * @public
+ */
+export type DestructiveRouteMethod = 'post' | 'put' | 'delete' | 'patch';
+
+/**
+ * Set of HTTP methods not changing the state of the server.
+ * @public
+ */
+export type SafeRouteMethod = 'get' | 'options';
+
/**
* The set of common HTTP methods supported by Kibana routing.
* @public
*/
-export type RouteMethod = 'get' | 'post' | 'put' | 'delete' | 'patch' | 'options';
+export type RouteMethod = SafeRouteMethod | DestructiveRouteMethod;
/**
* The set of valid body.output
@@ -108,6 +124,15 @@ export interface RouteConfigOptions<Method extends RouteMethod> {
*/
authRequired?: boolean;
+ /**
+ * Defines xsrf protection requirements for a route:
+ * - true. Requires an incoming POST/PUT/DELETE request to contain `kbn-xsrf` header.
+ * - false. Disables xsrf protection.
+ *
+ * Set to true by default
+ */
+ xsrfRequired?: Method extends 'get' ? never : boolean;
+
/**
* Additional metadata tag strings to attach to the route.
*/
diff --git a/src/core/server/index.ts b/src/core/server/index.ts
index e45d4f28edcc3..7d856ae101179 100644
--- a/src/core/server/index.ts
+++ b/src/core/server/index.ts
@@ -159,6 +159,8 @@ export {
SessionStorageCookieOptions,
SessionCookieValidationResult,
SessionStorageFactory,
+ DestructiveRouteMethod,
+ SafeRouteMethod,
} from './http';
export { RenderingServiceSetup, IRenderOptions } from './rendering';
export { Logger, LoggerFactory, LogMeta, LogRecord, LogLevel } from './logging';
@@ -229,6 +231,9 @@ export {
SavedObjectsType,
SavedObjectMigrationMap,
SavedObjectMigrationFn,
+ exportSavedObjectsToStream,
+ importSavedObjectsFromStream,
+ resolveSavedObjectsImportErrors,
} from './saved_objects';
export {
@@ -245,6 +250,14 @@ export {
StringValidationRegexString,
} from './ui_settings';
+export {
+ OpsMetrics,
+ OpsOsMetrics,
+ OpsServerMetrics,
+ OpsProcessMetrics,
+ MetricsServiceSetup,
+} from './metrics';
+
export { RecursiveReadonly } from '../utils';
export {
diff --git a/src/core/server/internal_types.ts b/src/core/server/internal_types.ts
index ff68d1544d119..37d1061dc618d 100644
--- a/src/core/server/internal_types.ts
+++ b/src/core/server/internal_types.ts
@@ -30,6 +30,7 @@ import {
} from './saved_objects';
import { InternalUiSettingsServiceSetup, InternalUiSettingsServiceStart } from './ui_settings';
import { UuidServiceSetup } from './uuid';
+import { InternalMetricsServiceSetup } from './metrics';
/** @internal */
export interface InternalCoreSetup {
@@ -40,6 +41,7 @@ export interface InternalCoreSetup {
uiSettings: InternalUiSettingsServiceSetup;
savedObjects: InternalSavedObjectsServiceSetup;
uuid: UuidServiceSetup;
+ metrics: InternalMetricsServiceSetup;
}
/**
diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts
index 46436461505c0..50468db8a504d 100644
--- a/src/core/server/legacy/legacy_service.test.ts
+++ b/src/core/server/legacy/legacy_service.test.ts
@@ -43,6 +43,7 @@ import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.
import { capabilitiesServiceMock } from '../capabilities/capabilities_service.mock';
import { setupMock as renderingServiceMock } from '../rendering/__mocks__/rendering_service';
import { uuidServiceMock } from '../uuid/uuid_service.mock';
+import { metricsServiceMock } from '../metrics/metrics_service.mock';
import { findLegacyPluginSpecs } from './plugins';
import { LegacyVars, LegacyServiceSetupDeps, LegacyServiceStartDeps } from './types';
import { LegacyService } from './legacy_service';
@@ -93,6 +94,7 @@ beforeEach(() => {
},
},
rendering: renderingServiceMock,
+ metrics: metricsServiceMock.createInternalSetupContract(),
uuid: uuidSetup,
},
plugins: { 'plugin-id': 'plugin-value' },
diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts
index 44f77b5ad215e..ca83a287c57e6 100644
--- a/src/core/server/legacy/legacy_service.ts
+++ b/src/core/server/legacy/legacy_service.ts
@@ -300,6 +300,7 @@ export class LegacyService implements CoreService {
setClientFactoryProvider: setupDeps.core.savedObjects.setClientFactoryProvider,
addClientWrapper: setupDeps.core.savedObjects.addClientWrapper,
registerType: setupDeps.core.savedObjects.registerType,
+ getImportExportObjectLimit: setupDeps.core.savedObjects.getImportExportObjectLimit,
},
uiSettings: {
register: setupDeps.core.uiSettings.register,
diff --git a/src/legacy/core_plugins/visualizations/public/legacy_imports.ts b/src/core/server/metrics/collectors/index.ts
similarity index 76%
rename from src/legacy/core_plugins/visualizations/public/legacy_imports.ts
rename to src/core/server/metrics/collectors/index.ts
index 0a3b1938436c0..f58ab02e63881 100644
--- a/src/legacy/core_plugins/visualizations/public/legacy_imports.ts
+++ b/src/core/server/metrics/collectors/index.ts
@@ -17,11 +17,7 @@
* under the License.
*/
-export {
- IAggConfig,
- IAggConfigs,
- isDateHistogramBucketAggConfig,
- setBounds,
-} from '../../data/public';
-export { createAggConfigs } from 'ui/agg_types';
-export { createSavedSearchesLoader } from '../../../../plugins/discover/public';
+export { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics, MetricsCollector } from './types';
+export { OsMetricsCollector } from './os';
+export { ProcessMetricsCollector } from './process';
+export { ServerMetricsCollector } from './server';
diff --git a/src/core/server/metrics/collectors/os.test.ts b/src/core/server/metrics/collectors/os.test.ts
new file mode 100644
index 0000000000000..7d5a6da90b7d6
--- /dev/null
+++ b/src/core/server/metrics/collectors/os.test.ts
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+jest.mock('getos', () => (cb: Function) => cb(null, { dist: 'distrib', release: 'release' }));
+
+import os from 'os';
+import { OsMetricsCollector } from './os';
+
+describe('OsMetricsCollector', () => {
+ let collector: OsMetricsCollector;
+
+ beforeEach(() => {
+ collector = new OsMetricsCollector();
+ });
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ });
+
+ it('collects platform info from the os package', async () => {
+ const platform = 'darwin';
+ const release = '10.14.1';
+
+ jest.spyOn(os, 'platform').mockImplementation(() => platform);
+ jest.spyOn(os, 'release').mockImplementation(() => release);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.platform).toBe(platform);
+ expect(metrics.platformRelease).toBe(`${platform}-${release}`);
+ });
+
+ it('collects distribution info when platform is linux', async () => {
+ const platform = 'linux';
+
+ jest.spyOn(os, 'platform').mockImplementation(() => platform);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.distro).toBe('distrib');
+ expect(metrics.distroRelease).toBe('distrib-release');
+ });
+
+ it('collects memory info from the os package', async () => {
+ const totalMemory = 1457886;
+ const freeMemory = 456786;
+
+ jest.spyOn(os, 'totalmem').mockImplementation(() => totalMemory);
+ jest.spyOn(os, 'freemem').mockImplementation(() => freeMemory);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.memory.total_in_bytes).toBe(totalMemory);
+ expect(metrics.memory.free_in_bytes).toBe(freeMemory);
+ expect(metrics.memory.used_in_bytes).toBe(totalMemory - freeMemory);
+ });
+
+ it('collects uptime info from the os package', async () => {
+ const uptime = 325;
+
+ jest.spyOn(os, 'uptime').mockImplementation(() => uptime);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.uptime_in_millis).toBe(uptime * 1000);
+ });
+
+ it('collects load info from the os package', async () => {
+ const oneMinLoad = 1;
+ const fiveMinLoad = 2;
+ const fifteenMinLoad = 3;
+
+ jest.spyOn(os, 'loadavg').mockImplementation(() => [oneMinLoad, fiveMinLoad, fifteenMinLoad]);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.load).toEqual({
+ '1m': oneMinLoad,
+ '5m': fiveMinLoad,
+ '15m': fifteenMinLoad,
+ });
+ });
+});
diff --git a/src/core/server/metrics/collectors/os.ts b/src/core/server/metrics/collectors/os.ts
new file mode 100644
index 0000000000000..d3d9bb0be86fa
--- /dev/null
+++ b/src/core/server/metrics/collectors/os.ts
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import os from 'os';
+import getosAsync, { LinuxOs } from 'getos';
+import { promisify } from 'util';
+import { OpsOsMetrics, MetricsCollector } from './types';
+
+const getos = promisify(getosAsync);
+
+export class OsMetricsCollector implements MetricsCollector<OpsOsMetrics> {
+  public async collect(): Promise<OpsOsMetrics> {
+ const platform = os.platform();
+ const load = os.loadavg();
+
+ const metrics: OpsOsMetrics = {
+ platform,
+ platformRelease: `${platform}-${os.release()}`,
+ load: {
+ '1m': load[0],
+ '5m': load[1],
+ '15m': load[2],
+ },
+ memory: {
+ total_in_bytes: os.totalmem(),
+ free_in_bytes: os.freemem(),
+ used_in_bytes: os.totalmem() - os.freemem(),
+ },
+ uptime_in_millis: os.uptime() * 1000,
+ };
+
+ if (platform === 'linux') {
+ try {
+ const distro = (await getos()) as LinuxOs;
+ metrics.distro = distro.dist;
+ metrics.distroRelease = `${distro.dist}-${distro.release}`;
+ } catch (e) {
+ // ignore errors
+ }
+ }
+
+ return metrics;
+ }
+}
diff --git a/src/core/server/metrics/collectors/process.test.ts b/src/core/server/metrics/collectors/process.test.ts
new file mode 100644
index 0000000000000..a437d799371f1
--- /dev/null
+++ b/src/core/server/metrics/collectors/process.test.ts
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import v8, { HeapInfo } from 'v8';
+import { ProcessMetricsCollector } from './process';
+
+describe('ProcessMetricsCollector', () => {
+ let collector: ProcessMetricsCollector;
+
+ beforeEach(() => {
+ collector = new ProcessMetricsCollector();
+ });
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ });
+
+ it('collects pid from the process', async () => {
+ const metrics = await collector.collect();
+
+ expect(metrics.pid).toEqual(process.pid);
+ });
+
+ it('collects event loop delay', async () => {
+ const metrics = await collector.collect();
+
+ expect(metrics.event_loop_delay).toBeGreaterThan(0);
+ });
+
+ it('collects uptime info from the process', async () => {
+ const uptime = 58986;
+ jest.spyOn(process, 'uptime').mockImplementation(() => uptime);
+
+ const metrics = await collector.collect();
+
+ expect(metrics.uptime_in_millis).toEqual(uptime * 1000);
+ });
+
+ it('collects memory info from the process', async () => {
+ const heapTotal = 58986;
+ const heapUsed = 4688;
+ const heapSizeLimit = 5788;
+ const rss = 5865;
+ jest.spyOn(process, 'memoryUsage').mockImplementation(() => ({
+ rss,
+ heapTotal,
+ heapUsed,
+ external: 0,
+ }));
+
+ jest.spyOn(v8, 'getHeapStatistics').mockImplementation(
+ () =>
+ ({
+ heap_size_limit: heapSizeLimit,
+ } as HeapInfo)
+ );
+
+ const metrics = await collector.collect();
+
+ expect(metrics.memory.heap.total_in_bytes).toEqual(heapTotal);
+ expect(metrics.memory.heap.used_in_bytes).toEqual(heapUsed);
+ expect(metrics.memory.heap.size_limit).toEqual(heapSizeLimit);
+ expect(metrics.memory.resident_set_size_in_bytes).toEqual(rss);
+ });
+});
diff --git a/src/core/server/metrics/collectors/process.ts b/src/core/server/metrics/collectors/process.ts
new file mode 100644
index 0000000000000..aa68abaf74e41
--- /dev/null
+++ b/src/core/server/metrics/collectors/process.ts
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import v8 from 'v8';
+import { Bench } from 'hoek';
+import { OpsProcessMetrics, MetricsCollector } from './types';
+
+export class ProcessMetricsCollector implements MetricsCollector<OpsProcessMetrics> {
+  public async collect(): Promise<OpsProcessMetrics> {
+ const heapStats = v8.getHeapStatistics();
+ const memoryUsage = process.memoryUsage();
+ const [eventLoopDelay] = await Promise.all([getEventLoopDelay()]);
+ return {
+ memory: {
+ heap: {
+ total_in_bytes: memoryUsage.heapTotal,
+ used_in_bytes: memoryUsage.heapUsed,
+ size_limit: heapStats.heap_size_limit,
+ },
+ resident_set_size_in_bytes: memoryUsage.rss,
+ },
+ pid: process.pid,
+ event_loop_delay: eventLoopDelay,
+ uptime_in_millis: process.uptime() * 1000,
+ };
+ }
+}
+
+const getEventLoopDelay = (): Promise<number> => {
+ const bench = new Bench();
+ return new Promise(resolve => {
+ setImmediate(() => {
+ return resolve(bench.elapsed());
+ });
+ });
+};
diff --git a/src/core/server/metrics/collectors/server.ts b/src/core/server/metrics/collectors/server.ts
new file mode 100644
index 0000000000000..e46ac2f653df6
--- /dev/null
+++ b/src/core/server/metrics/collectors/server.ts
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { ResponseObject, Server as HapiServer } from 'hapi';
+import { OpsServerMetrics, MetricsCollector } from './types';
+
+interface ServerResponseTime {
+ count: number;
+ total: number;
+ max: number;
+}
+
+export class ServerMetricsCollector implements MetricsCollector<OpsServerMetrics> {
+ private readonly requests: OpsServerMetrics['requests'] = {
+ disconnects: 0,
+ total: 0,
+ statusCodes: {},
+ };
+ private readonly responseTimes: ServerResponseTime = {
+ count: 0,
+ total: 0,
+ max: 0,
+ };
+
+ constructor(private readonly server: HapiServer) {
+ this.server.ext('onRequest', (request, h) => {
+ this.requests.total++;
+ request.events.once('disconnect', () => {
+ this.requests.disconnects++;
+ });
+ return h.continue;
+ });
+ this.server.events.on('response', request => {
+ const statusCode = (request.response as ResponseObject)?.statusCode;
+ if (statusCode) {
+ if (!this.requests.statusCodes[statusCode]) {
+ this.requests.statusCodes[statusCode] = 0;
+ }
+ this.requests.statusCodes[statusCode]++;
+ }
+
+ const duration = Date.now() - request.info.received;
+ this.responseTimes.count++;
+ this.responseTimes.total += duration;
+ this.responseTimes.max = Math.max(this.responseTimes.max, duration);
+ });
+ }
+
+  public async collect(): Promise<OpsServerMetrics> {
+    const connections = await new Promise<number>(resolve => {
+ this.server.listener.getConnections((_, count) => {
+ resolve(count);
+ });
+ });
+
+ return {
+ requests: this.requests,
+ response_times: {
+ avg_in_millis: this.responseTimes.total / Math.max(this.responseTimes.count, 1),
+ max_in_millis: this.responseTimes.max,
+ },
+ concurrent_connections: connections,
+ };
+ }
+}
diff --git a/src/core/server/metrics/collectors/types.ts b/src/core/server/metrics/collectors/types.ts
new file mode 100644
index 0000000000000..5a83bc70af3c1
--- /dev/null
+++ b/src/core/server/metrics/collectors/types.ts
@@ -0,0 +1,110 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/** Base interface for all metrics gatherers */
+export interface MetricsCollector<T> {
+  collect(): Promise<T>;
+}
+
+/**
+ * Process related metrics
+ * @public
+ */
+export interface OpsProcessMetrics {
+ /** process memory usage */
+ memory: {
+ /** heap memory usage */
+ heap: {
+ /** total heap available */
+ total_in_bytes: number;
+ /** used heap */
+ used_in_bytes: number;
+ /** v8 heap size limit */
+ size_limit: number;
+ };
+ /** node rss */
+ resident_set_size_in_bytes: number;
+ };
+ /** node event loop delay */
+ event_loop_delay: number;
+ /** pid of the kibana process */
+ pid: number;
+ /** uptime of the kibana process */
+ uptime_in_millis: number;
+}
+
+/**
+ * OS related metrics
+ * @public
+ */
+export interface OpsOsMetrics {
+ /** The os platform */
+ platform: NodeJS.Platform;
+ /** The os platform release, prefixed by the platform name */
+ platformRelease: string;
+ /** The os distrib. Only present for linux platforms */
+ distro?: string;
+ /** The os distrib release, prefixed by the os distrib. Only present for linux platforms */
+ distroRelease?: string;
+ /** cpu load metrics */
+ load: {
+ /** load for last minute */
+ '1m': number;
+ /** load for last 5 minutes */
+ '5m': number;
+ /** load for last 15 minutes */
+ '15m': number;
+ };
+ /** system memory usage metrics */
+ memory: {
+ /** total memory available */
+ total_in_bytes: number;
+ /** current free memory */
+ free_in_bytes: number;
+ /** current used memory */
+ used_in_bytes: number;
+ };
+ /** the OS uptime */
+ uptime_in_millis: number;
+}
+
+/**
+ * server related metrics
+ * @public
+ */
+export interface OpsServerMetrics {
+ /** server response time stats */
+ response_times: {
+ /** average response time */
+ avg_in_millis: number;
+ /** maximum response time */
+ max_in_millis: number;
+ };
+ /** server requests stats */
+ requests: {
+ /** number of disconnected requests since startup */
+ disconnects: number;
+ /** total number of requests handled since startup */
+ total: number;
+ /** number of request handled per response status code */
+    statusCodes: Record<number, number>;
+ };
+ /** number of current concurrent connections to the server */
+ concurrent_connections: number;
+}
diff --git a/src/core/server/metrics/index.ts b/src/core/server/metrics/index.ts
new file mode 100644
index 0000000000000..fdcf637c0cd7b
--- /dev/null
+++ b/src/core/server/metrics/index.ts
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export {
+ InternalMetricsServiceStart,
+ InternalMetricsServiceSetup,
+ MetricsServiceSetup,
+ MetricsServiceStart,
+ OpsMetrics,
+} from './types';
+export { OpsProcessMetrics, OpsServerMetrics, OpsOsMetrics } from './collectors';
+export { MetricsService } from './metrics_service';
+export { opsConfig } from './ops_config';
diff --git a/src/core/server/metrics/integration_tests/server_collector.test.ts b/src/core/server/metrics/integration_tests/server_collector.test.ts
new file mode 100644
index 0000000000000..6baf95894b9b4
--- /dev/null
+++ b/src/core/server/metrics/integration_tests/server_collector.test.ts
@@ -0,0 +1,203 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { BehaviorSubject, Subject } from 'rxjs';
+import { take, filter } from 'rxjs/operators';
+import supertest from 'supertest';
+import { Server as HapiServer } from 'hapi';
+import { createHttpServer } from '../../http/test_utils';
+import { HttpService, IRouter } from '../../http';
+import { contextServiceMock } from '../../context/context_service.mock';
+import { ServerMetricsCollector } from '../collectors/server';
+
+const requestWaitDelay = 25;
+
+describe('ServerMetricsCollector', () => {
+ let server: HttpService;
+ let collector: ServerMetricsCollector;
+ let hapiServer: HapiServer;
+ let router: IRouter;
+
+ const delay = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
+ const sendGet = (path: string) => supertest(hapiServer.listener).get(path);
+
+ beforeEach(async () => {
+ server = createHttpServer();
+ const contextSetup = contextServiceMock.createSetupContract();
+ const httpSetup = await server.setup({ context: contextSetup });
+ hapiServer = httpSetup.server;
+ router = httpSetup.createRouter('/');
+ collector = new ServerMetricsCollector(hapiServer);
+ });
+
+ afterEach(async () => {
+ await server.stop();
+ });
+
+ it('collect requests infos', async () => {
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ let metrics = await collector.collect();
+
+ expect(metrics.requests).toEqual({
+ total: 0,
+ disconnects: 0,
+ statusCodes: {},
+ });
+
+ await sendGet('/');
+ await sendGet('/');
+ await sendGet('/not-found');
+
+ metrics = await collector.collect();
+
+ expect(metrics.requests).toEqual({
+ total: 3,
+ disconnects: 0,
+ statusCodes: {
+ '200': 2,
+ '404': 1,
+ },
+ });
+ });
+
+ it('collect disconnects requests infos', async () => {
+ const never = new Promise(resolve => undefined);
+ const hitSubject = new BehaviorSubject(0);
+
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/disconnect', validate: false }, async (ctx, req, res) => {
+ hitSubject.next(hitSubject.value + 1);
+ await never;
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ await sendGet('/');
+ const discoReq1 = sendGet('/disconnect').end();
+ const discoReq2 = sendGet('/disconnect').end();
+
+ await hitSubject
+ .pipe(
+ filter(count => count >= 2),
+ take(1)
+ )
+ .toPromise();
+
+ let metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 0,
+ })
+ );
+
+ discoReq1.abort();
+ await delay(requestWaitDelay);
+
+ metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 1,
+ })
+ );
+
+ discoReq2.abort();
+ await delay(requestWaitDelay);
+
+ metrics = await collector.collect();
+ expect(metrics.requests).toEqual(
+ expect.objectContaining({
+ total: 3,
+ disconnects: 2,
+ })
+ );
+ });
+
+ it('collect response times', async () => {
+ router.get({ path: '/no-delay', validate: false }, async (ctx, req, res) => {
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/500-ms', validate: false }, async (ctx, req, res) => {
+ await delay(500);
+ return res.ok({ body: '' });
+ });
+ router.get({ path: '/250-ms', validate: false }, async (ctx, req, res) => {
+ await delay(250);
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ await Promise.all([sendGet('/no-delay'), sendGet('/250-ms')]);
+ let metrics = await collector.collect();
+
+ expect(metrics.response_times.avg_in_millis).toBeGreaterThanOrEqual(125);
+ expect(metrics.response_times.max_in_millis).toBeGreaterThanOrEqual(250);
+
+ await Promise.all([sendGet('/500-ms'), sendGet('/500-ms')]);
+ metrics = await collector.collect();
+
+ expect(metrics.response_times.avg_in_millis).toBeGreaterThanOrEqual(250);
+ expect(metrics.response_times.max_in_millis).toBeGreaterThanOrEqual(500);
+ });
+
+ it('collect connection count', async () => {
+ const waitSubject = new Subject();
+ const hitSubject = new BehaviorSubject(0);
+
+ router.get({ path: '/', validate: false }, async (ctx, req, res) => {
+ hitSubject.next(hitSubject.value + 1);
+ await waitSubject.pipe(take(1)).toPromise();
+ return res.ok({ body: '' });
+ });
+ await server.start();
+
+ const waitForHits = (hits: number) =>
+ hitSubject
+ .pipe(
+ filter(count => count >= hits),
+ take(1)
+ )
+ .toPromise();
+
+ let metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(0);
+
+ sendGet('/').end(() => null);
+ await waitForHits(1);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(1);
+
+ sendGet('/').end(() => null);
+ await waitForHits(2);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(2);
+
+ waitSubject.next('go');
+ await delay(requestWaitDelay);
+ metrics = await collector.collect();
+ expect(metrics.concurrent_connections).toEqual(0);
+ });
+});
diff --git a/src/core/server/metrics/metrics_service.mock.ts b/src/core/server/metrics/metrics_service.mock.ts
new file mode 100644
index 0000000000000..cc53a4e27d571
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.mock.ts
@@ -0,0 +1,67 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { MetricsService } from './metrics_service';
+import {
+ InternalMetricsServiceSetup,
+ InternalMetricsServiceStart,
+ MetricsServiceSetup,
+ MetricsServiceStart,
+} from './types';
+
+const createSetupContractMock = () => {
+ const setupContract: jest.Mocked = {
+ getOpsMetrics$: jest.fn(),
+ };
+ return setupContract;
+};
+
+const createInternalSetupContractMock = () => {
+ const setupContract: jest.Mocked = createSetupContractMock();
+ return setupContract;
+};
+
+const createStartContractMock = () => {
+ const startContract: jest.Mocked = {};
+ return startContract;
+};
+
+const createInternalStartContractMock = () => {
+ const startContract: jest.Mocked = createStartContractMock();
+ return startContract;
+};
+
+type MetricsServiceContract = PublicMethodsOf;
+
+const createMock = () => {
+ const mocked: jest.Mocked = {
+ setup: jest.fn().mockReturnValue(createInternalSetupContractMock()),
+ start: jest.fn().mockReturnValue(createInternalStartContractMock()),
+ stop: jest.fn(),
+ };
+ return mocked;
+};
+
+export const metricsServiceMock = {
+ create: createMock,
+ createSetupContract: createSetupContractMock,
+ createStartContract: createStartContractMock,
+ createInternalSetupContract: createInternalSetupContractMock,
+ createInternalStartContract: createInternalStartContractMock,
+};
diff --git a/src/core/server/metrics/metrics_service.test.mocks.ts b/src/core/server/metrics/metrics_service.test.mocks.ts
new file mode 100644
index 0000000000000..8e91775283042
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.test.mocks.ts
@@ -0,0 +1,25 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const mockOpsCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./ops_metrics_collector', () => ({
+ OpsMetricsCollector: jest.fn().mockImplementation(() => mockOpsCollector),
+}));
diff --git a/src/core/server/metrics/metrics_service.test.ts b/src/core/server/metrics/metrics_service.test.ts
new file mode 100644
index 0000000000000..10d6761adbe7d
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.test.ts
@@ -0,0 +1,134 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import moment from 'moment';
+import { mockOpsCollector } from './metrics_service.test.mocks';
+import { MetricsService } from './metrics_service';
+import { mockCoreContext } from '../core_context.mock';
+import { configServiceMock } from '../config/config_service.mock';
+import { httpServiceMock } from '../http/http_service.mock';
+import { take } from 'rxjs/operators';
+
+const testInterval = 100;
+
+const dummyMetrics = { metricA: 'value', metricB: 'otherValue' };
+
+describe('MetricsService', () => {
+ const httpMock = httpServiceMock.createSetupContract();
+ let metricsService: MetricsService;
+
+ beforeEach(() => {
+ jest.useFakeTimers();
+
+ const configService = configServiceMock.create({
+ atPath: { interval: moment.duration(testInterval) },
+ });
+ const coreContext = mockCoreContext.create({ configService });
+ metricsService = new MetricsService(coreContext);
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ jest.clearAllTimers();
+ });
+
+ describe('#start', () => {
+ it('invokes setInterval with the configured interval', async () => {
+ await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ expect(setInterval).toHaveBeenCalledTimes(1);
+ expect(setInterval).toHaveBeenCalledWith(expect.any(Function), testInterval);
+ });
+
+ it('emits the metrics at start', async () => {
+ mockOpsCollector.collect.mockResolvedValue(dummyMetrics);
+
+ const { getOpsMetrics$ } = await metricsService.setup({
+ http: httpMock,
+ });
+
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+ expect(
+ await getOpsMetrics$()
+ .pipe(take(1))
+ .toPromise()
+ ).toEqual(dummyMetrics);
+ });
+
+ it('collects the metrics at every interval', async () => {
+ mockOpsCollector.collect.mockResolvedValue(dummyMetrics);
+
+ await metricsService.setup({ http: httpMock });
+
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(3);
+ });
+
+ it('throws when called before setup', async () => {
+ await expect(metricsService.start()).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"#setup() needs to be run first"`
+ );
+ });
+ });
+
+ describe('#stop', () => {
+ it('stops the metrics interval', async () => {
+ const { getOpsMetrics$ } = await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ await metricsService.stop();
+ jest.advanceTimersByTime(10 * testInterval);
+ expect(mockOpsCollector.collect).toHaveBeenCalledTimes(2);
+
+ getOpsMetrics$().subscribe({ complete: () => {} });
+ });
+
+ it('completes the metrics observable', async () => {
+ const { getOpsMetrics$ } = await metricsService.setup({ http: httpMock });
+ await metricsService.start();
+
+ let completed = false;
+
+ getOpsMetrics$().subscribe({
+ complete: () => {
+ completed = true;
+ },
+ });
+
+ await metricsService.stop();
+
+ expect(completed).toEqual(true);
+ });
+ });
+});
diff --git a/src/core/server/metrics/metrics_service.ts b/src/core/server/metrics/metrics_service.ts
new file mode 100644
index 0000000000000..1aed89a4aad60
--- /dev/null
+++ b/src/core/server/metrics/metrics_service.ts
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { ReplaySubject } from 'rxjs';
+import { first, shareReplay } from 'rxjs/operators';
+import { CoreService } from '../../types';
+import { CoreContext } from '../core_context';
+import { Logger } from '../logging';
+import { InternalHttpServiceSetup } from '../http';
+import { InternalMetricsServiceSetup, InternalMetricsServiceStart, OpsMetrics } from './types';
+import { OpsMetricsCollector } from './ops_metrics_collector';
+import { opsConfig, OpsConfigType } from './ops_config';
+
+interface MetricsServiceSetupDeps {
+ http: InternalHttpServiceSetup;
+}
+
+/** @internal */
+export class MetricsService
+ implements CoreService {
+ private readonly logger: Logger;
+ private metricsCollector?: OpsMetricsCollector;
+ private collectInterval?: NodeJS.Timeout;
+ private metrics$ = new ReplaySubject(1);
+
+ constructor(private readonly coreContext: CoreContext) {
+ this.logger = coreContext.logger.get('metrics');
+ }
+
+ public async setup({ http }: MetricsServiceSetupDeps): Promise {
+ this.metricsCollector = new OpsMetricsCollector(http.server);
+
+ const metricsObservable = this.metrics$.pipe(shareReplay(1));
+
+ return {
+ getOpsMetrics$: () => metricsObservable,
+ };
+ }
+
+ public async start(): Promise {
+ if (!this.metricsCollector) {
+ throw new Error('#setup() needs to be run first');
+ }
+ const config = await this.coreContext.configService
+ .atPath(opsConfig.path)
+ .pipe(first())
+ .toPromise();
+
+ await this.refreshMetrics();
+
+ this.collectInterval = setInterval(() => {
+ this.refreshMetrics();
+ }, config.interval.asMilliseconds());
+
+ return {};
+ }
+
+ private async refreshMetrics() {
+ this.logger.debug('Refreshing metrics');
+ const metrics = await this.metricsCollector!.collect();
+ this.metrics$.next(metrics);
+ }
+
+ public async stop() {
+ if (this.collectInterval) {
+ clearInterval(this.collectInterval);
+ }
+ this.metrics$.complete();
+ }
+}
diff --git a/src/legacy/core_plugins/kibana/public/home/index.ts b/src/core/server/metrics/ops_config.ts
similarity index 69%
rename from src/legacy/core_plugins/kibana/public/home/index.ts
rename to src/core/server/metrics/ops_config.ts
index 74b6da33c6542..bd6ae5cc5474d 100644
--- a/src/legacy/core_plugins/kibana/public/home/index.ts
+++ b/src/core/server/metrics/ops_config.ts
@@ -17,13 +17,13 @@
* under the License.
*/
-import { PluginInitializerContext } from 'kibana/public';
-import { npSetup, npStart } from 'ui/new_platform';
-import { HomePlugin } from './plugin';
+import { schema, TypeOf } from '@kbn/config-schema';
-const instance = new HomePlugin({
- env: npSetup.plugins.kibanaLegacy.env,
-} as PluginInitializerContext);
-instance.setup(npSetup.core, npSetup.plugins);
+export const opsConfig = {
+ path: 'ops',
+ schema: schema.object({
+ interval: schema.duration({ defaultValue: '5s' }),
+ }),
+};
-instance.start(npStart.core, npStart.plugins);
+export type OpsConfigType = TypeOf;
diff --git a/src/core/server/metrics/ops_metrics_collector.test.mocks.ts b/src/core/server/metrics/ops_metrics_collector.test.mocks.ts
new file mode 100644
index 0000000000000..8265796d57970
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.test.mocks.ts
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const mockOsCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/os', () => ({
+ OsMetricsCollector: jest.fn().mockImplementation(() => mockOsCollector),
+}));
+
+export const mockProcessCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/process', () => ({
+ ProcessMetricsCollector: jest.fn().mockImplementation(() => mockProcessCollector),
+}));
+
+export const mockServerCollector = {
+ collect: jest.fn(),
+};
+jest.doMock('./collectors/server', () => ({
+ ServerMetricsCollector: jest.fn().mockImplementation(() => mockServerCollector),
+}));
diff --git a/src/core/server/metrics/ops_metrics_collector.test.ts b/src/core/server/metrics/ops_metrics_collector.test.ts
new file mode 100644
index 0000000000000..04302a195fb6c
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.test.ts
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ mockOsCollector,
+ mockProcessCollector,
+ mockServerCollector,
+} from './ops_metrics_collector.test.mocks';
+import { httpServiceMock } from '../http/http_service.mock';
+import { OpsMetricsCollector } from './ops_metrics_collector';
+
+describe('OpsMetricsCollector', () => {
+ let collector: OpsMetricsCollector;
+
+ beforeEach(() => {
+ const hapiServer = httpServiceMock.createSetupContract().server;
+ collector = new OpsMetricsCollector(hapiServer);
+
+ mockOsCollector.collect.mockResolvedValue('osMetrics');
+ });
+
+ it('gathers metrics from the underlying collectors', async () => {
+ mockOsCollector.collect.mockResolvedValue('osMetrics');
+ mockProcessCollector.collect.mockResolvedValue('processMetrics');
+ mockServerCollector.collect.mockResolvedValue({
+ requests: 'serverRequestsMetrics',
+ response_times: 'serverTimingMetrics',
+ });
+
+ const metrics = await collector.collect();
+
+ expect(mockOsCollector.collect).toHaveBeenCalledTimes(1);
+ expect(mockProcessCollector.collect).toHaveBeenCalledTimes(1);
+ expect(mockServerCollector.collect).toHaveBeenCalledTimes(1);
+
+ expect(metrics).toEqual({
+ process: 'processMetrics',
+ os: 'osMetrics',
+ requests: 'serverRequestsMetrics',
+ response_times: 'serverTimingMetrics',
+ });
+ });
+});
diff --git a/src/core/server/metrics/ops_metrics_collector.ts b/src/core/server/metrics/ops_metrics_collector.ts
new file mode 100644
index 0000000000000..04344f21f57f7
--- /dev/null
+++ b/src/core/server/metrics/ops_metrics_collector.ts
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Server as HapiServer } from 'hapi';
+import {
+ ProcessMetricsCollector,
+ OsMetricsCollector,
+ ServerMetricsCollector,
+ MetricsCollector,
+} from './collectors';
+import { OpsMetrics } from './types';
+
+export class OpsMetricsCollector implements MetricsCollector {
+ private readonly processCollector: ProcessMetricsCollector;
+ private readonly osCollector: OsMetricsCollector;
+ private readonly serverCollector: ServerMetricsCollector;
+
+ constructor(server: HapiServer) {
+ this.processCollector = new ProcessMetricsCollector();
+ this.osCollector = new OsMetricsCollector();
+ this.serverCollector = new ServerMetricsCollector(server);
+ }
+
+ public async collect(): Promise {
+ const [process, os, server] = await Promise.all([
+ this.processCollector.collect(),
+ this.osCollector.collect(),
+ this.serverCollector.collect(),
+ ]);
+ return {
+ process,
+ os,
+ ...server,
+ };
+ }
+}
diff --git a/src/core/server/metrics/types.ts b/src/core/server/metrics/types.ts
new file mode 100644
index 0000000000000..5c8f18fff380d
--- /dev/null
+++ b/src/core/server/metrics/types.ts
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Observable } from 'rxjs';
+import { OpsProcessMetrics, OpsOsMetrics, OpsServerMetrics } from './collectors';
+
+/**
+ * APIs to retrieve metrics gathered and exposed by the core platform.
+ *
+ * @public
+ */
+export interface MetricsServiceSetup {
+ /**
+ * Retrieve an observable emitting the {@link OpsMetrics} gathered.
+ * The observable will emit an initial value during core's `start` phase, and a new value every fixed interval of time,
+ * based on the `ops.interval` configuration property.
+ *
+ * @example
+ * ```ts
+ * core.metrics.getOpsMetrics$().subscribe(metrics => {
+ * // do something with the metrics
+ * })
+ * ```
+ */
+ getOpsMetrics$: () => Observable;
+}
+// eslint-disable-next-line @typescript-eslint/no-empty-interface
+export interface MetricsServiceStart {}
+
+export type InternalMetricsServiceSetup = MetricsServiceSetup;
+export type InternalMetricsServiceStart = MetricsServiceStart;
+
+/**
+ * Regroups metrics gathered by all the collectors.
+ * This contains metrics about the os/runtime, the kibana process and the http server.
+ *
+ * @public
+ */
+export interface OpsMetrics {
+ /** Process related metrics */
+ process: OpsProcessMetrics;
+ /** OS related metrics */
+ os: OpsOsMetrics;
+ /** server response time stats */
+ response_times: OpsServerMetrics['response_times'];
+ /** server requests stats */
+ requests: OpsServerMetrics['requests'];
+ /** number of current concurrent connections to the server */
+ concurrent_connections: OpsServerMetrics['concurrent_connections'];
+}
diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts
index 96b28ab5827e1..037f3bbed67e0 100644
--- a/src/core/server/mocks.ts
+++ b/src/core/server/mocks.ts
@@ -30,6 +30,8 @@ import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
import { SharedGlobalConfig } from './plugins';
import { InternalCoreSetup, InternalCoreStart } from './internal_types';
import { capabilitiesServiceMock } from './capabilities/capabilities_service.mock';
+import { metricsServiceMock } from './metrics/metrics_service.mock';
+import { uuidServiceMock } from './uuid/uuid_service.mock';
export { httpServerMock } from './http/http_server.mocks';
export { sessionStorageMock } from './http/cookie_session_storage.mocks';
@@ -40,7 +42,7 @@ export { loggingServiceMock } from './logging/logging_service.mock';
export { savedObjectsRepositoryMock } from './saved_objects/service/lib/repository.mock';
export { typeRegistryMock as savedObjectsTypeRegistryMock } from './saved_objects/saved_objects_type_registry.mock';
export { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
-import { uuidServiceMock } from './uuid/uuid_service.mock';
+export { metricsServiceMock } from './metrics/metrics_service.mock';
export function pluginInitializerContextConfigMock(config: T) {
const globalConfig: SharedGlobalConfig = {
@@ -153,6 +155,7 @@ function createInternalCoreSetupMock() {
uiSettings: uiSettingsServiceMock.createSetupContract(),
savedObjects: savedObjectsServiceMock.createInternalSetupContract(),
uuid: uuidServiceMock.createSetupContract(),
+ metrics: metricsServiceMock.createInternalSetupContract(),
};
return setupDeps;
}
diff --git a/src/core/server/plugins/plugin.ts b/src/core/server/plugins/plugin.ts
index d6c774f6fc41c..b372874264eb5 100644
--- a/src/core/server/plugins/plugin.ts
+++ b/src/core/server/plugins/plugin.ts
@@ -95,7 +95,7 @@ export class PluginWrapper<
public async setup(setupContext: CoreSetup, plugins: TPluginsSetup) {
this.instance = this.createPluginInstance();
- this.log.info('Setting up plugin');
+ this.log.debug('Setting up plugin');
return this.instance.setup(setupContext, plugins);
}
@@ -112,6 +112,8 @@ export class PluginWrapper<
throw new Error(`Plugin "${this.name}" can't be started since it isn't set up.`);
}
+ this.log.debug('Starting plugin');
+
const startContract = await this.instance.start(startContext, plugins);
this.startDependencies$.next([startContext, plugins]);
return startContract;
diff --git a/src/core/server/plugins/plugin_context.ts b/src/core/server/plugins/plugin_context.ts
index a8a16713f69a4..f2a44e9f78d4f 100644
--- a/src/core/server/plugins/plugin_context.ts
+++ b/src/core/server/plugins/plugin_context.ts
@@ -170,6 +170,7 @@ export function createPluginSetupContext(
setClientFactoryProvider: deps.savedObjects.setClientFactoryProvider,
addClientWrapper: deps.savedObjects.addClientWrapper,
registerType: deps.savedObjects.registerType,
+ getImportExportObjectLimit: deps.savedObjects.getImportExportObjectLimit,
},
uiSettings: {
register: deps.uiSettings.register,
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
index 1088478add137..32485f461f59b 100644
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
+++ b/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
@@ -17,7 +17,7 @@
* under the License.
*/
-import { getSortedObjectsForExport } from './get_sorted_objects_for_export';
+import { exportSavedObjectsToStream } from './get_sorted_objects_for_export';
import { savedObjectsClientMock } from '../service/saved_objects_client.mock';
import { Readable } from 'stream';
import { createPromiseFromStreams, createConcatStream } from '../../../../legacy/utils/streams';
@@ -65,7 +65,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -151,7 +151,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -210,7 +210,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -297,7 +297,7 @@ describe('getSortedObjectsForExport()', () => {
per_page: 1,
page: 0,
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 500,
types: ['index-pattern', 'search'],
@@ -385,7 +385,7 @@ describe('getSortedObjectsForExport()', () => {
page: 0,
});
await expect(
- getSortedObjectsForExport({
+ exportSavedObjectsToStream({
savedObjectsClient,
exportSizeLimit: 1,
types: ['index-pattern', 'search'],
@@ -425,7 +425,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
types: ['index-pattern'],
@@ -489,7 +489,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
objects: [
@@ -587,7 +587,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
});
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
objects: [
@@ -681,7 +681,7 @@ describe('getSortedObjectsForExport()', () => {
},
],
};
- await expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ await expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Can't export more than 1 objects"`
);
});
@@ -694,7 +694,7 @@ describe('getSortedObjectsForExport()', () => {
objects: undefined,
};
- expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Either \`type\` or \`objects\` are required."`
);
});
@@ -707,7 +707,7 @@ describe('getSortedObjectsForExport()', () => {
search: 'foo',
};
- expect(getSortedObjectsForExport(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Can't specify both \\"search\\" and \\"objects\\" properties when exporting"`
);
});
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
index 4b4cf1146aca0..a703c9f9fbd96 100644
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
+++ b/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
@@ -124,7 +124,13 @@ async function fetchObjectsToExport({
}
}
-export async function getSortedObjectsForExport({
+/**
+ * Generates sorted saved object stream to be used for export.
+ * See the {@link SavedObjectsExportOptions | options} for more detailed information.
+ *
+ * @public
+ */
+export async function exportSavedObjectsToStream({
types,
objects,
search,
diff --git a/src/core/server/saved_objects/export/index.ts b/src/core/server/saved_objects/export/index.ts
index 7533b8e500039..37824cceb18cb 100644
--- a/src/core/server/saved_objects/export/index.ts
+++ b/src/core/server/saved_objects/export/index.ts
@@ -18,7 +18,7 @@
*/
export {
- getSortedObjectsForExport,
+ exportSavedObjectsToStream,
SavedObjectsExportOptions,
SavedObjectsExportResultDetails,
} from './get_sorted_objects_for_export';
diff --git a/src/core/server/saved_objects/import/import_saved_objects.test.ts b/src/core/server/saved_objects/import/import_saved_objects.test.ts
index f0719cbf4c829..b43e5063c13e1 100644
--- a/src/core/server/saved_objects/import/import_saved_objects.test.ts
+++ b/src/core/server/saved_objects/import/import_saved_objects.test.ts
@@ -19,7 +19,7 @@
import { Readable } from 'stream';
import { SavedObject } from '../types';
-import { importSavedObjects } from './import_saved_objects';
+import { importSavedObjectsFromStream } from './import_saved_objects';
import { savedObjectsClientMock } from '../../mocks';
const emptyResponse = {
@@ -76,7 +76,7 @@ describe('importSavedObjects()', () => {
this.push(null);
},
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 1,
overwrite: false,
@@ -103,7 +103,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -186,7 +186,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -270,7 +270,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: true,
@@ -362,7 +362,7 @@ describe('importSavedObjects()', () => {
references: [],
})),
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -460,7 +460,7 @@ describe('importSavedObjects()', () => {
},
],
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 4,
overwrite: false,
@@ -536,7 +536,7 @@ describe('importSavedObjects()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
readStream,
objectLimit: 5,
overwrite: false,
diff --git a/src/core/server/saved_objects/import/import_saved_objects.ts b/src/core/server/saved_objects/import/import_saved_objects.ts
index ef3b4a214c2c2..cb1d70e5c8dc4 100644
--- a/src/core/server/saved_objects/import/import_saved_objects.ts
+++ b/src/core/server/saved_objects/import/import_saved_objects.ts
@@ -26,7 +26,13 @@ import {
} from './types';
import { validateReferences } from './validate_references';
-export async function importSavedObjects({
+/**
+ * Import saved objects from given stream. See the {@link SavedObjectsImportOptions | options} for more
+ * detailed information.
+ *
+ * @public
+ */
+export async function importSavedObjectsFromStream({
readStream,
objectLimit,
overwrite,
diff --git a/src/core/server/saved_objects/import/index.ts b/src/core/server/saved_objects/import/index.ts
index 95fa8aa192f3e..e268e970b94ac 100644
--- a/src/core/server/saved_objects/import/index.ts
+++ b/src/core/server/saved_objects/import/index.ts
@@ -17,8 +17,8 @@
* under the License.
*/
-export { importSavedObjects } from './import_saved_objects';
-export { resolveImportErrors } from './resolve_import_errors';
+export { importSavedObjectsFromStream } from './import_saved_objects';
+export { resolveSavedObjectsImportErrors } from './resolve_import_errors';
export {
SavedObjectsImportResponse,
SavedObjectsImportError,
diff --git a/src/core/server/saved_objects/import/resolve_import_errors.test.ts b/src/core/server/saved_objects/import/resolve_import_errors.test.ts
index c522d76f1ff04..2c6d89e0a0a47 100644
--- a/src/core/server/saved_objects/import/resolve_import_errors.test.ts
+++ b/src/core/server/saved_objects/import/resolve_import_errors.test.ts
@@ -19,7 +19,7 @@
import { Readable } from 'stream';
import { SavedObject } from '../types';
-import { resolveImportErrors } from './resolve_import_errors';
+import { resolveSavedObjectsImportErrors } from './resolve_import_errors';
import { savedObjectsClientMock } from '../../mocks';
describe('resolveImportErrors()', () => {
@@ -80,7 +80,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: [],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [],
@@ -107,7 +107,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValueOnce({
saved_objects: savedObjects.filter(obj => obj.type === 'visualization' && obj.id === '3'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -168,7 +168,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'index-pattern' && obj.id === '1'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -230,7 +230,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'dashboard' && obj.id === '4'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
@@ -312,7 +312,7 @@ describe('resolveImportErrors()', () => {
references: [],
})),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: savedObjects.map(obj => ({
@@ -415,7 +415,7 @@ describe('resolveImportErrors()', () => {
},
],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 2,
retries: [
@@ -503,7 +503,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: [],
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 5,
retries: [
@@ -547,7 +547,7 @@ describe('resolveImportErrors()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects.filter(obj => obj.type === 'index-pattern' && obj.id === '1'),
});
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
readStream,
objectLimit: 4,
retries: [
diff --git a/src/core/server/saved_objects/import/resolve_import_errors.ts b/src/core/server/saved_objects/import/resolve_import_errors.ts
index 6f56f283b4aec..d9ac567882573 100644
--- a/src/core/server/saved_objects/import/resolve_import_errors.ts
+++ b/src/core/server/saved_objects/import/resolve_import_errors.ts
@@ -27,7 +27,13 @@ import {
} from './types';
import { validateReferences } from './validate_references';
-export async function resolveImportErrors({
+/**
+ * Resolve and return saved object import errors.
+ * See the {@link SavedObjectsResolveImportErrorsOptions | options} for more detailed information.
+ *
+ * @public
+ */
+export async function resolveSavedObjectsImportErrors({
readStream,
objectLimit,
retries,
diff --git a/src/core/server/saved_objects/import/types.ts b/src/core/server/saved_objects/import/types.ts
index 44046378a7b97..067579f54edac 100644
--- a/src/core/server/saved_objects/import/types.ts
+++ b/src/core/server/saved_objects/import/types.ts
@@ -107,11 +107,17 @@ export interface SavedObjectsImportResponse {
* @public
*/
export interface SavedObjectsImportOptions {
+ /** The stream of {@link SavedObject | saved objects} to import */
readStream: Readable;
+ /** The maximum number of objects to import */
objectLimit: number;
+ /** if true, will overwrite existing objects if present */
overwrite: boolean;
+ /** {@link SavedObjectsClientContract | client} to use to perform the import operation */
savedObjectsClient: SavedObjectsClientContract;
+ /** the list of allowed types to import */
supportedTypes: string[];
+ /** if specified, will import in given namespace, else will import as global object */
namespace?: string;
}
@@ -120,10 +126,16 @@ export interface SavedObjectsImportOptions {
* @public
*/
export interface SavedObjectsResolveImportErrorsOptions {
+ /** The stream of {@link SavedObject | saved objects} to resolve errors from */
readStream: Readable;
+ /** The maximum number of objects to import */
objectLimit: number;
+ /** client to use to perform the import operation */
savedObjectsClient: SavedObjectsClientContract;
+ /** saved object import references to retry */
retries: SavedObjectsImportRetry[];
+ /** the list of allowed types to import */
supportedTypes: string[];
+ /** if specified, will import in given namespace */
namespace?: string;
}
diff --git a/src/core/server/saved_objects/index.ts b/src/core/server/saved_objects/index.ts
index 9bfe658028258..661c6cbb79e58 100644
--- a/src/core/server/saved_objects/index.ts
+++ b/src/core/server/saved_objects/index.ts
@@ -26,7 +26,7 @@ export { SavedObjectsManagement } from './management';
export * from './import';
export {
- getSortedObjectsForExport,
+ exportSavedObjectsToStream,
SavedObjectsExportOptions,
SavedObjectsExportResultDetails,
} from './export';
diff --git a/src/core/server/saved_objects/routes/export.ts b/src/core/server/saved_objects/routes/export.ts
index ab287332d8a65..04d310681aec5 100644
--- a/src/core/server/saved_objects/routes/export.ts
+++ b/src/core/server/saved_objects/routes/export.ts
@@ -26,7 +26,7 @@ import {
} from '../../../../legacy/utils/streams';
import { IRouter } from '../../http';
import { SavedObjectConfig } from '../saved_objects_config';
-import { getSortedObjectsForExport } from '../export';
+import { exportSavedObjectsToStream } from '../export';
export const registerExportRoute = (
router: IRouter,
@@ -67,7 +67,7 @@ export const registerExportRoute = (
router.handleLegacyErrors(async (context, req, res) => {
const savedObjectsClient = context.core.savedObjects.client;
const { type, objects, search, excludeExportDetails, includeReferencesDeep } = req.body;
- const exportStream = await getSortedObjectsForExport({
+ const exportStream = await exportSavedObjectsToStream({
savedObjectsClient,
types: typeof type === 'string' ? [type] : type,
search,
diff --git a/src/core/server/saved_objects/routes/import.ts b/src/core/server/saved_objects/routes/import.ts
index e3f249dca05f7..313e84c0b301d 100644
--- a/src/core/server/saved_objects/routes/import.ts
+++ b/src/core/server/saved_objects/routes/import.ts
@@ -21,7 +21,7 @@ import { Readable } from 'stream';
import { extname } from 'path';
import { schema } from '@kbn/config-schema';
import { IRouter } from '../../http';
-import { importSavedObjects } from '../import';
+import { importSavedObjectsFromStream } from '../import';
import { SavedObjectConfig } from '../saved_objects_config';
import { createSavedObjectsStreamFromNdJson } from './utils';
@@ -65,7 +65,7 @@ export const registerImportRoute = (
return res.badRequest({ body: `Invalid file extension ${fileExtension}` });
}
- const result = await importSavedObjects({
+ const result = await importSavedObjectsFromStream({
supportedTypes,
savedObjectsClient: context.core.savedObjects.client,
readStream: createSavedObjectsStreamFromNdJson(file),
diff --git a/src/core/server/saved_objects/routes/integration_tests/export.test.ts b/src/core/server/saved_objects/routes/integration_tests/export.test.ts
index b52a8957176cc..a81079b6825d6 100644
--- a/src/core/server/saved_objects/routes/integration_tests/export.test.ts
+++ b/src/core/server/saved_objects/routes/integration_tests/export.test.ts
@@ -18,7 +18,7 @@
*/
jest.mock('../../export', () => ({
- getSortedObjectsForExport: jest.fn(),
+ exportSavedObjectsToStream: jest.fn(),
}));
import * as exportMock from '../../export';
@@ -30,7 +30,7 @@ import { registerExportRoute } from '../export';
import { setupServer } from './test_utils';
type setupServerReturn = UnwrapPromise<ReturnType<typeof setupServer>>;
-const getSortedObjectsForExport = exportMock.getSortedObjectsForExport as jest.Mock;
+const exportSavedObjectsToStream = exportMock.exportSavedObjectsToStream as jest.Mock;
const allowedTypes = ['index-pattern', 'search'];
const config = {
maxImportPayloadBytes: 10485760,
@@ -76,7 +76,7 @@ describe('POST /api/saved_objects/_export', () => {
],
},
];
- getSortedObjectsForExport.mockResolvedValueOnce(createListStream(sortedObjects));
+ exportSavedObjectsToStream.mockResolvedValueOnce(createListStream(sortedObjects));
const result = await supertest(httpSetup.server.listener)
.post('/api/saved_objects/_export')
@@ -96,7 +96,7 @@ describe('POST /api/saved_objects/_export', () => {
const objects = (result.text as string).split('\n').map(row => JSON.parse(row));
expect(objects).toEqual(sortedObjects);
- expect(getSortedObjectsForExport.mock.calls[0][0]).toEqual(
+ expect(exportSavedObjectsToStream.mock.calls[0][0]).toEqual(
expect.objectContaining({
excludeExportDetails: false,
exportSizeLimit: 10000,
diff --git a/src/core/server/saved_objects/routes/resolve_import_errors.ts b/src/core/server/saved_objects/routes/resolve_import_errors.ts
index efa7add7951b0..a10a19ba1d8ff 100644
--- a/src/core/server/saved_objects/routes/resolve_import_errors.ts
+++ b/src/core/server/saved_objects/routes/resolve_import_errors.ts
@@ -21,7 +21,7 @@ import { extname } from 'path';
import { Readable } from 'stream';
import { schema } from '@kbn/config-schema';
import { IRouter } from '../../http';
-import { resolveImportErrors } from '../import';
+import { resolveSavedObjectsImportErrors } from '../import';
import { SavedObjectConfig } from '../saved_objects_config';
import { createSavedObjectsStreamFromNdJson } from './utils';
@@ -75,7 +75,7 @@ export const registerResolveImportErrorsRoute = (
if (fileExtension !== '.ndjson') {
return res.badRequest({ body: `Invalid file extension ${fileExtension}` });
}
- const result = await resolveImportErrors({
+ const result = await resolveSavedObjectsImportErrors({
supportedTypes,
savedObjectsClient: context.core.savedObjects.client,
readStream: createSavedObjectsStreamFromNdJson(file),
diff --git a/src/core/server/saved_objects/saved_objects_service.mock.ts b/src/core/server/saved_objects/saved_objects_service.mock.ts
index cbdff16324536..9fe32b14e6450 100644
--- a/src/core/server/saved_objects/saved_objects_service.mock.ts
+++ b/src/core/server/saved_objects/saved_objects_service.mock.ts
@@ -64,8 +64,11 @@ const createSetupContractMock = () => {
setClientFactoryProvider: jest.fn(),
addClientWrapper: jest.fn(),
registerType: jest.fn(),
+ getImportExportObjectLimit: jest.fn(),
};
+ setupContract.getImportExportObjectLimit.mockReturnValue(100);
+
return setupContract;
};
diff --git a/src/core/server/saved_objects/saved_objects_service.ts b/src/core/server/saved_objects/saved_objects_service.ts
index 62e25ad5fb458..89f7990c771c8 100644
--- a/src/core/server/saved_objects/saved_objects_service.ts
+++ b/src/core/server/saved_objects/saved_objects_service.ts
@@ -154,6 +154,11 @@ export interface SavedObjectsServiceSetup {
* This API is the single entry point to register saved object types in the new platform.
*/
registerType: (type: SavedObjectsType) => void;
+
+ /**
+ * Returns the maximum number of objects allowed for import or export operations.
+ */
+ getImportExportObjectLimit: () => number;
}
/**
@@ -344,6 +349,7 @@ export class SavedObjectsService
}
this.typeRegistry.registerType(type);
},
+ getImportExportObjectLimit: () => this.config!.maxImportExportSize,
};
}
diff --git a/src/core/server/saved_objects/types.ts b/src/core/server/saved_objects/types.ts
index 495d896ad12cd..c9c672d0f8b1c 100644
--- a/src/core/server/saved_objects/types.ts
+++ b/src/core/server/saved_objects/types.ts
@@ -62,7 +62,6 @@ export interface SavedObjectsMigrationVersion {
}
/**
- *
* @public
*/
export interface SavedObject {
diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md
index 42bc1ce214b19..6b0d962aedcd1 100644
--- a/src/core/server/server.api.md
+++ b/src/core/server/server.api.md
@@ -685,6 +685,9 @@ export interface DeprecationSettings {
message: string;
}
+// @public
+export type DestructiveRouteMethod = 'post' | 'put' | 'delete' | 'patch';
+
// @public
export interface DiscoveredPlugin {
readonly configPath: ConfigPath;
@@ -763,6 +766,9 @@ export interface ErrorHttpResponseOptions {
headers?: ResponseHeaders;
}
+// @public
+export function exportSavedObjectsToStream({ types, objects, search, savedObjectsClient, exportSizeLimit, includeReferencesDeep, excludeExportDetails, namespace, }: SavedObjectsExportOptions): Promise<Readable>;
+
// @public
export interface FakeRequest {
headers: Headers;
@@ -891,6 +897,9 @@ export interface ImageValidation {
};
}
+// @public
+export function importSavedObjectsFromStream({ readStream, objectLimit, overwrite, savedObjectsClient, supportedTypes, namespace, }: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse>;
+
// @public (undocumented)
export interface IndexSettingsDeprecationInfo {
// (undocumented)
@@ -1176,6 +1185,11 @@ export interface LogRecord {
timestamp: Date;
}
+// @public
+export interface MetricsServiceSetup {
+    getOpsMetrics$: () => Observable<OpsMetrics>;
+}
+
// @public (undocumented)
export type MIGRATION_ASSISTANCE_INDEX_ACTION = 'upgrade' | 'reindex';
@@ -1227,6 +1241,63 @@ export interface OnPreResponseToolkit {
next: (responseExtensions?: OnPreResponseExtensions) => OnPreResponseResult;
}
+// @public
+export interface OpsMetrics {
+ concurrent_connections: OpsServerMetrics['concurrent_connections'];
+ os: OpsOsMetrics;
+ process: OpsProcessMetrics;
+ requests: OpsServerMetrics['requests'];
+ response_times: OpsServerMetrics['response_times'];
+}
+
+// @public
+export interface OpsOsMetrics {
+ distro?: string;
+ distroRelease?: string;
+ load: {
+ '1m': number;
+ '5m': number;
+ '15m': number;
+ };
+ memory: {
+ total_in_bytes: number;
+ free_in_bytes: number;
+ used_in_bytes: number;
+ };
+ platform: NodeJS.Platform;
+ platformRelease: string;
+ uptime_in_millis: number;
+}
+
+// @public
+export interface OpsProcessMetrics {
+ event_loop_delay: number;
+ memory: {
+ heap: {
+ total_in_bytes: number;
+ used_in_bytes: number;
+ size_limit: number;
+ };
+ resident_set_size_in_bytes: number;
+ };
+ pid: number;
+ uptime_in_millis: number;
+}
+
+// @public
+export interface OpsServerMetrics {
+ concurrent_connections: number;
+ requests: {
+ disconnects: number;
+ total: number;
+        statusCodes: Record<number, number>;
+ };
+ response_times: {
+ avg_in_millis: number;
+ max_in_millis: number;
+ };
+}
+
// @public (undocumented)
export interface PackageInfo {
// (undocumented)
@@ -1369,6 +1440,9 @@ export type RequestHandlerContextContainer = IContextContainer<RequestHandler<any, any, any>>;
export type RequestHandlerContextProvider<TContextName extends keyof RequestHandlerContext> = IContextProvider<RequestHandler<any, any, any>, TContextName>;
+// @public
+export function resolveSavedObjectsImportErrors({ readStream, objectLimit, retries, savedObjectsClient, supportedTypes, namespace, }: SavedObjectsResolveImportErrorsOptions): Promise<SavedObjectsImportResponse>;
+
// @public
export type ResponseError = string | Error | {
message: string | Error;
@@ -1397,6 +1471,7 @@ export interface RouteConfigOptions {
authRequired?: boolean;
body?: Method extends 'get' | 'options' ? undefined : RouteConfigOptionsBody;
tags?: readonly string[];
+ xsrfRequired?: Method extends 'get' ? never : boolean;
}
// @public
@@ -1411,7 +1486,7 @@ export interface RouteConfigOptionsBody {
export type RouteContentType = 'application/json' | 'application/*+json' | 'application/octet-stream' | 'application/x-www-form-urlencoded' | 'multipart/form-data' | 'text/*';
// @public
-export type RouteMethod = 'get' | 'post' | 'put' | 'delete' | 'patch' | 'options';
+export type RouteMethod = SafeRouteMethod | DestructiveRouteMethod;
// @public
export type RouteRegistrar<Method extends RouteMethod> = <P, Q, B>(route: RouteConfig<P, Q, B, Method>, handler: RequestHandler<P, Q, B, Method>) => void;
@@ -1464,6 +1539,9 @@ export interface RouteValidatorOptions {
};
}
+// @public
+export type SafeRouteMethod = 'get' | 'options';
+
// @public (undocumented)
export interface SavedObject {
attributes: T;
@@ -1827,17 +1905,11 @@ export interface SavedObjectsImportMissingReferencesError {
// @public
export interface SavedObjectsImportOptions {
- // (undocumented)
namespace?: string;
- // (undocumented)
objectLimit: number;
- // (undocumented)
overwrite: boolean;
- // (undocumented)
readStream: Readable;
- // (undocumented)
savedObjectsClient: SavedObjectsClientContract;
- // (undocumented)
supportedTypes: string[];
}
@@ -1991,17 +2063,11 @@ export interface SavedObjectsRepositoryFactory {
// @public
export interface SavedObjectsResolveImportErrorsOptions {
- // (undocumented)
namespace?: string;
- // (undocumented)
objectLimit: number;
- // (undocumented)
readStream: Readable;
- // (undocumented)
retries: SavedObjectsImportRetry[];
- // (undocumented)
savedObjectsClient: SavedObjectsClientContract;
- // (undocumented)
supportedTypes: string[];
}
@@ -2032,6 +2098,7 @@ export class SavedObjectsSerializer {
// @public
export interface SavedObjectsServiceSetup {
addClientWrapper: (priority: number, id: string, factory: SavedObjectsClientWrapperFactory) => void;
+ getImportExportObjectLimit: () => number;
registerType: (type: SavedObjectsType) => void;
setClientFactoryProvider: (clientFactoryProvider: SavedObjectsClientFactoryProvider) => void;
}
diff --git a/src/core/server/server.test.mocks.ts b/src/core/server/server.test.mocks.ts
index 038c4651ff5a7..53d1b742a6494 100644
--- a/src/core/server/server.test.mocks.ts
+++ b/src/core/server/server.test.mocks.ts
@@ -79,3 +79,9 @@ export const mockUuidService = uuidServiceMock.create();
jest.doMock('./uuid/uuid_service', () => ({
UuidService: jest.fn(() => mockUuidService),
}));
+
+import { metricsServiceMock } from './metrics/metrics_service.mock';
+export const mockMetricsService = metricsServiceMock.create();
+jest.doMock('./metrics/metrics_service', () => ({
+ MetricsService: jest.fn(() => mockMetricsService),
+}));
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index 161dd3759a218..a4b5a9d81df20 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -28,6 +28,7 @@ import {
mockEnsureValidConfiguration,
mockUiSettingsService,
mockRenderingService,
+ mockMetricsService,
} from './server.test.mocks';
import { BehaviorSubject } from 'rxjs';
@@ -61,6 +62,7 @@ test('sets up services on "setup"', async () => {
expect(mockSavedObjectsService.setup).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
expect(mockRenderingService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
await server.setup();
@@ -71,6 +73,7 @@ test('sets up services on "setup"', async () => {
expect(mockSavedObjectsService.setup).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.setup).toHaveBeenCalledTimes(1);
expect(mockRenderingService.setup).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.setup).toHaveBeenCalledTimes(1);
});
test('injects legacy dependency to context#setup()', async () => {
@@ -107,6 +110,7 @@ test('runs services on "start"', async () => {
expect(mockLegacyService.start).not.toHaveBeenCalled();
expect(mockSavedObjectsService.start).not.toHaveBeenCalled();
expect(mockUiSettingsService.start).not.toHaveBeenCalled();
+ expect(mockMetricsService.start).not.toHaveBeenCalled();
await server.start();
@@ -114,6 +118,7 @@ test('runs services on "start"', async () => {
expect(mockLegacyService.start).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.start).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.start).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.start).toHaveBeenCalledTimes(1);
});
test('does not fail on "setup" if there are unused paths detected', async () => {
@@ -135,6 +140,7 @@ test('stops services on "stop"', async () => {
expect(mockLegacyService.stop).not.toHaveBeenCalled();
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
expect(mockUiSettingsService.stop).not.toHaveBeenCalled();
+ expect(mockMetricsService.stop).not.toHaveBeenCalled();
await server.stop();
@@ -144,6 +150,7 @@ test('stops services on "stop"', async () => {
expect(mockLegacyService.stop).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.stop).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.stop).toHaveBeenCalledTimes(1);
+ expect(mockMetricsService.stop).toHaveBeenCalledTimes(1);
});
test(`doesn't setup core services if config validation fails`, async () => {
@@ -159,6 +166,7 @@ test(`doesn't setup core services if config validation fails`, async () => {
expect(mockLegacyService.setup).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
expect(mockRenderingService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
});
test(`doesn't setup core services if legacy config validation fails`, async () => {
@@ -178,4 +186,5 @@ test(`doesn't setup core services if legacy config validation fails`, async () =
expect(mockLegacyService.setup).not.toHaveBeenCalled();
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
+ expect(mockMetricsService.setup).not.toHaveBeenCalled();
});
diff --git a/src/core/server/server.ts b/src/core/server/server.ts
index db2493b38d6e0..8603f5fba1da8 100644
--- a/src/core/server/server.ts
+++ b/src/core/server/server.ts
@@ -34,6 +34,7 @@ import { Logger, LoggerFactory } from './logging';
import { UiSettingsService } from './ui_settings';
import { PluginsService, config as pluginsConfig } from './plugins';
import { SavedObjectsService } from '../server/saved_objects';
+import { MetricsService, opsConfig } from './metrics';
import { config as cspConfig } from './csp';
import { config as elasticsearchConfig } from './elasticsearch';
@@ -67,6 +68,7 @@ export class Server {
private readonly savedObjects: SavedObjectsService;
private readonly uiSettings: UiSettingsService;
private readonly uuid: UuidService;
+ private readonly metrics: MetricsService;
private coreStart?: InternalCoreStart;
@@ -89,6 +91,7 @@ export class Server {
this.uiSettings = new UiSettingsService(core);
this.capabilities = new CapabilitiesService(core);
this.uuid = new UuidService(core);
+ this.metrics = new MetricsService(core);
}
public async setup() {
@@ -137,6 +140,8 @@ export class Server {
legacyPlugins,
});
+ const metricsSetup = await this.metrics.setup({ http: httpSetup });
+
const coreSetup: InternalCoreSetup = {
capabilities: capabilitiesSetup,
context: contextServiceSetup,
@@ -145,6 +150,7 @@ export class Server {
uiSettings: uiSettingsSetup,
savedObjects: savedObjectsSetup,
uuid: uuidSetup,
+ metrics: metricsSetup,
};
const pluginsSetup = await this.plugins.setup(coreSetup);
@@ -193,6 +199,7 @@ export class Server {
await this.http.start();
await this.rendering.start();
+ await this.metrics.start();
return this.coreStart;
}
@@ -207,6 +214,7 @@ export class Server {
await this.http.stop();
await this.uiSettings.stop();
await this.rendering.stop();
+ await this.metrics.stop();
}
private registerDefaultRoute(httpSetup: InternalHttpServiceSetup) {
@@ -260,6 +268,7 @@ export class Server {
[savedObjectsConfig.path, savedObjectsConfig.schema],
[savedObjectsMigrationConfig.path, savedObjectsMigrationConfig.schema],
[uiSettingsConfig.path, uiSettingsConfig.schema],
+ [opsConfig.path, opsConfig.schema],
];
this.configService.addDeprecationProvider(rootConfigPath, coreDeprecationProvider);
diff --git a/src/core/utils/merge.test.ts b/src/core/utils/merge.test.ts
index c857e980dec21..7ef07a83399ac 100644
--- a/src/core/utils/merge.test.ts
+++ b/src/core/utils/merge.test.ts
@@ -17,6 +17,7 @@
* under the License.
*/
+// eslint-disable-next-line max-classes-per-file
import { merge } from './merge';
describe('merge', () => {
@@ -62,6 +63,29 @@ describe('merge', () => {
expect(merge({ a: 0 }, { a: 1 }, {})).toEqual({ a: 1 });
});
+ test('does not merge class instances', () => {
+ class Folder {
+ constructor(public readonly path: string) {}
+ getPath() {
+ return this.path;
+ }
+ }
+ class File {
+ constructor(public readonly content: string) {}
+ getContent() {
+ return this.content;
+ }
+ }
+ const folder = new Folder('/etc');
+ const file = new File('yolo');
+
+ const result = merge({}, { content: folder }, { content: file });
+ expect(result).toStrictEqual({
+ content: file,
+ });
+ expect(result.content.getContent()).toBe('yolo');
+ });
+
test(`doesn't pollute prototypes`, () => {
merge({}, JSON.parse('{ "__proto__": { "foo": "bar" } }'));
merge({}, JSON.parse('{ "constructor": { "prototype": { "foo": "bar" } } }'));
diff --git a/src/core/utils/merge.ts b/src/core/utils/merge.ts
index 8e5d9f4860d95..43878c27b1e19 100644
--- a/src/core/utils/merge.ts
+++ b/src/core/utils/merge.ts
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-
+import { isPlainObject } from 'lodash';
/**
* Deeply merges two objects, omitting undefined values, and not deeply merging Arrays.
*
@@ -60,7 +60,7 @@ export function merge>(
) as TReturn;
}
-const isMergable = (obj: any) => typeof obj === 'object' && obj !== null && !Array.isArray(obj);
+const isMergable = (obj: any) => isPlainObject(obj);
const mergeObjects = , U extends Record>(
baseObj: T,
diff --git a/src/dev/storybook/aliases.ts b/src/dev/storybook/aliases.ts
index fb91b865097fa..35ac4e27f9c8b 100644
--- a/src/dev/storybook/aliases.ts
+++ b/src/dev/storybook/aliases.ts
@@ -20,6 +20,7 @@
export const storybookAliases = {
apm: 'x-pack/legacy/plugins/apm/scripts/storybook.js',
canvas: 'x-pack/legacy/plugins/canvas/scripts/storybook_new.js',
+ codeeditor: 'src/plugins/kibana_react/public/code_editor/scripts/storybook.ts',
drilldowns: 'x-pack/plugins/drilldowns/scripts/storybook.js',
embeddable: 'src/plugins/embeddable/scripts/storybook.js',
infra: 'x-pack/legacy/plugins/infra/scripts/storybook.js',
diff --git a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts
new file mode 100644
index 0000000000000..bfba4d7f4c8da
--- /dev/null
+++ b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.test.ts
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ fieldFormats,
+ FieldFormatsGetConfigFn,
+ esFilters,
+ IndexPatternsContract,
+} from '../../../../../../plugins/data/public';
+// eslint-disable-next-line @kbn/eslint/no-restricted-paths
+import { setIndexPatterns } from '../../../../../../plugins/data/public/services';
+import { dataPluginMock } from '../../../../../../plugins/data/public/mocks';
+import { createFiltersFromEvent, EventData } from './create_filters_from_event';
+import { mockDataServices } from '../../search/aggs/test_helpers';
+
+jest.mock('ui/new_platform');
+
+const mockField = {
+ name: 'bytes',
+ indexPattern: {
+ id: 'logstash-*',
+ },
+ filterable: true,
+ format: new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn),
+};
+
+describe('createFiltersFromEvent', () => {
+ let dataPoints: EventData[];
+
+ beforeEach(() => {
+ dataPoints = [
+ {
+ table: {
+ columns: [
+ {
+ name: 'test',
+ id: '1-1',
+ meta: {
+ type: 'histogram',
+ indexPatternId: 'logstash-*',
+ aggConfigParams: {
+ field: 'bytes',
+ interval: 30,
+ otherBucket: true,
+ },
+ },
+ },
+ ],
+ rows: [
+ {
+ '1-1': '2048',
+ },
+ ],
+ },
+ column: 0,
+ row: 0,
+ value: 'test',
+ },
+ ];
+
+ mockDataServices();
+ setIndexPatterns(({
+ ...dataPluginMock.createStartContract().indexPatterns,
+ get: async () => ({
+ id: 'logstash-*',
+ fields: {
+ getByName: () => mockField,
+ filter: () => [mockField],
+ },
+ }),
+ } as unknown) as IndexPatternsContract);
+ });
+
+ test('ignores event when value for rows is not provided', async () => {
+ dataPoints[0].table.rows[0]['1-1'] = null;
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(0);
+ });
+
+ test('handles an event when aggregations type is a terms', async () => {
+ if (dataPoints[0].table.columns[0].meta) {
+ dataPoints[0].table.columns[0].meta.type = 'terms';
+ }
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(1);
+ expect(filters[0].query.match_phrase.bytes).toEqual('2048');
+ });
+
+ test('handles an event when aggregations type is not terms', async () => {
+ const filters = await createFiltersFromEvent(dataPoints);
+
+ expect(filters.length).toEqual(1);
+
+ const [rangeFilter] = filters;
+
+ if (esFilters.isRangeFilter(rangeFilter)) {
+ expect(rangeFilter.range.bytes.gte).toEqual(2048);
+ expect(rangeFilter.range.bytes.lt).toEqual(2078);
+ }
+ });
+});
diff --git a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
similarity index 70%
rename from src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js
rename to src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
index 1037c718d0003..3713c781b0958 100644
--- a/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.js
+++ b/src/legacy/core_plugins/data/public/actions/filters/create_filters_from_event.ts
@@ -17,21 +17,33 @@
* under the License.
*/
-import { esFilters } from '../../../../../../plugins/data/public';
+import { KibanaDatatable } from '../../../../../../plugins/expressions/public';
+import { esFilters, Filter } from '../../../../../../plugins/data/public';
import { deserializeAggConfig } from '../../search/expressions/utils';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { getIndexPatterns } from '../../../../../../plugins/data/public/services';
+export interface EventData {
+ table: Pick;
+ column: number;
+ row: number;
+ value: any;
+}
+
/**
* For terms aggregations on `__other__` buckets, this assembles a list of applicable filter
* terms based on a specific cell in the tabified data.
*
- * @param {object} table - tabified table data
+ * @param {EventData['table']} table - tabified table data
* @param {number} columnIndex - current column index
* @param {number} rowIndex - current row index
* @return {array} - array of terms to filter against
*/
-const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
+const getOtherBucketFilterTerms = (
+ table: EventData['table'],
+ columnIndex: number,
+ rowIndex: number
+) => {
if (rowIndex === -1) {
return [];
}
@@ -42,7 +54,7 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
return row[column.id] === table.rows[rowIndex][column.id] || i >= columnIndex;
});
});
- const terms = rows.map(row => row[table.columns[columnIndex].id]);
+ const terms: any[] = rows.map(row => row[table.columns[columnIndex].id]);
return [
...new Set(
@@ -59,22 +71,27 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
* Assembles the filters needed to apply filtering against a specific cell value, while accounting
* for cases like if the value is a terms agg in an `__other__` or `__missing__` bucket.
*
- * @param {object} table - tabified table data
+ * @param {EventData['table']} table - tabified table data
* @param {number} columnIndex - current column index
* @param {number} rowIndex - current row index
* @param {string} cellValue - value of the current cell
- * @return {array|string} - filter or list of filters to provide to queryFilter.addFilters()
+ * @return {Filter[]|undefined} - list of filters to provide to queryFilter.addFilters()
*/
-const createFilter = async (table, columnIndex, rowIndex) => {
- if (!table || !table.columns || !table.columns[columnIndex]) return;
+const createFilter = async (table: EventData['table'], columnIndex: number, rowIndex: number) => {
+ if (!table || !table.columns || !table.columns[columnIndex]) {
+ return;
+ }
const column = table.columns[columnIndex];
+ if (!column.meta || !column.meta.indexPatternId) {
+ return;
+ }
const aggConfig = deserializeAggConfig({
type: column.meta.type,
- aggConfigParams: column.meta.aggConfigParams,
+ aggConfigParams: column.meta.aggConfigParams ? column.meta.aggConfigParams : {},
indexPattern: await getIndexPatterns().get(column.meta.indexPatternId),
});
- let filter = [];
- const value = rowIndex > -1 ? table.rows[rowIndex][column.id] : null;
+ let filter: Filter[] = [];
+ const value: any = rowIndex > -1 ? table.rows[rowIndex][column.id] : null;
if (value === null || value === undefined || !aggConfig.isFilterable()) {
return;
}
@@ -85,6 +102,10 @@ const createFilter = async (table, columnIndex, rowIndex) => {
filter = aggConfig.createFilter(value);
}
+ if (!filter) {
+ return;
+ }
+
if (!Array.isArray(filter)) {
filter = [filter];
}
@@ -92,19 +113,18 @@ const createFilter = async (table, columnIndex, rowIndex) => {
return filter;
};
-const createFiltersFromEvent = async event => {
- const filters = [];
- const dataPoints = event.data || [event];
+const createFiltersFromEvent = async (dataPoints: EventData[], negate?: boolean) => {
+ const filters: Filter[] = [];
await Promise.all(
dataPoints
.filter(point => point)
.map(async val => {
const { table, column, row } = val;
- const filter = await createFilter(table, column, row);
+ const filter: Filter[] = (await createFilter(table, column, row)) || [];
if (filter) {
filter.forEach(f => {
- if (event.negate) {
+ if (negate) {
f = esFilters.toggleFilterNegated(f);
}
filters.push(f);
diff --git a/src/legacy/core_plugins/data/public/actions/value_click_action.ts b/src/legacy/core_plugins/data/public/actions/value_click_action.ts
index 260b401e6d658..26933cc8ddb82 100644
--- a/src/legacy/core_plugins/data/public/actions/value_click_action.ts
+++ b/src/legacy/core_plugins/data/public/actions/value_click_action.ts
@@ -46,7 +46,9 @@ interface ActionContext {
async function isCompatible(context: ActionContext) {
try {
- const filters: Filter[] = (await createFiltersFromEvent(context.data)) || [];
+ const filters: Filter[] =
+ (await createFiltersFromEvent(context.data.data || [context.data], context.data.negate)) ||
+ [];
return filters.length > 0;
} catch {
return false;
@@ -71,7 +73,8 @@ export function valueClickAction(
throw new IncompatibleActionError();
}
- const filters: Filter[] = (await createFiltersFromEvent(data)) || [];
+ const filters: Filter[] =
+ (await createFiltersFromEvent(data.data || [data], data.negate)) || [];
let selectedFilters: Filter[] = esFilters.mapAndFlattenFilters(filters);
diff --git a/src/legacy/core_plugins/data/public/search/aggs/__tests__/buckets/_terms_other_bucket_helper.js b/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
similarity index 54%
rename from src/legacy/core_plugins/data/public/search/aggs/__tests__/buckets/_terms_other_bucket_helper.js
rename to src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
index 749dad377f2e2..976ab57c00b63 100644
--- a/src/legacy/core_plugins/data/public/search/aggs/__tests__/buckets/_terms_other_bucket_helper.js
+++ b/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
@@ -17,39 +17,73 @@
* under the License.
*/
-import expect from '@kbn/expect';
-import ngMock from 'ng_mock';
import {
buildOtherBucketAgg,
mergeOtherBucketAggResponse,
updateMissingBucket,
-} from '../../buckets/_terms_other_bucket_helper';
-import { start as visualizationsStart } from '../../../../../../../core_plugins/visualizations/public/np_ready/public/legacy';
-import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
+} from './_terms_other_bucket_helper';
+import { AggConfigs, CreateAggConfigParams } from '../agg_configs';
+import { BUCKET_TYPES } from './bucket_agg_types';
+import { IBucketAggConfig } from './_bucket_agg_type';
+import { mockDataServices, mockAggTypesRegistry } from '../test_helpers';
-const visConfigSingleTerm = {
- type: 'pie',
+const indexPattern = {
+ id: '1234',
+ title: 'logstash-*',
+ fields: [
+ {
+ name: 'field',
+ },
+ ],
+} as any;
+
+const singleTerm = {
aggs: [
{
- type: 'terms',
- schema: 'segment',
- params: { field: 'machine.os.raw', otherBucket: true, missingBucket: true },
+ id: '1',
+ type: BUCKET_TYPES.TERMS,
+ params: {
+ field: {
+ name: 'machine.os.raw',
+ indexPattern,
+ filterable: true,
+ },
+ otherBucket: true,
+ missingBucket: true,
+ },
},
],
};
-const visConfigNestedTerm = {
- type: 'pie',
+const nestedTerm = {
aggs: [
{
- type: 'terms',
- schema: 'segment',
- params: { field: 'geo.src', size: 2, otherBucket: false, missingBucket: false },
+ id: '1',
+ type: BUCKET_TYPES.TERMS,
+ params: {
+ field: {
+ name: 'geo.src',
+ indexPattern,
+ filterable: true,
+ },
+ size: 2,
+ otherBucket: false,
+ missingBucket: false,
+ },
},
{
- type: 'terms',
- schema: 'segment',
- params: { field: 'machine.os.raw', size: 2, otherBucket: true, missingBucket: true },
+ id: '2',
+ type: BUCKET_TYPES.TERMS,
+ params: {
+ field: {
+ name: 'machine.os.raw',
+ indexPattern,
+ filterable: true,
+ },
+ size: 2,
+ otherBucket: true,
+ missingBucket: true,
+ },
},
],
};
@@ -183,28 +217,36 @@ const nestedOtherResponse = {
status: 200,
};
-describe('Terms Agg Other bucket helper', () => {
- let vis;
+jest.mock('ui/new_platform');
- function init(aggConfig) {
- ngMock.module('kibana');
- ngMock.inject(Private => {
- const indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
+describe('Terms Agg Other bucket helper', () => {
+ const typesRegistry = mockAggTypesRegistry();
+ const getAggConfigs = (aggs: CreateAggConfigParams[] = []) => {
+ return new AggConfigs(indexPattern, [...aggs], { typesRegistry });
+ };
- vis = new visualizationsStart.Vis(indexPattern, aggConfig);
- });
- }
+ beforeEach(() => {
+ mockDataServices();
+ });
describe('buildOtherBucketAgg', () => {
- it('returns a function', () => {
- init(visConfigSingleTerm);
- const agg = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[0], singleTermResponse);
- expect(agg).to.be.a('function');
+ test('returns a function', () => {
+ const aggConfigs = getAggConfigs(singleTerm.aggs);
+ const agg = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[0] as IBucketAggConfig,
+ singleTermResponse
+ );
+ expect(typeof agg).toBe('function');
});
- it('correctly builds query with single terms agg', () => {
- init(visConfigSingleTerm);
- const agg = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[0], singleTermResponse)();
+ test('correctly builds query with single terms agg', () => {
+ const aggConfigs = getAggConfigs(singleTerm.aggs);
+ const agg = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[0] as IBucketAggConfig,
+ singleTermResponse
+ );
const expectedResponse = {
aggs: undefined,
filters: {
@@ -223,13 +265,19 @@ describe('Terms Agg Other bucket helper', () => {
},
},
};
-
- expect(agg['other-filter']).to.eql(expectedResponse);
+ expect(agg).toBeDefined();
+ if (agg) {
+ expect(agg()['other-filter']).toEqual(expectedResponse);
+ }
});
- it('correctly builds query for nested terms agg', () => {
- init(visConfigNestedTerm);
- const agg = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[1], nestedTermResponse)();
+ test('correctly builds query for nested terms agg', () => {
+ const aggConfigs = getAggConfigs(nestedTerm.aggs);
+ const agg = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[1] as IBucketAggConfig,
+ nestedTermResponse
+ );
const expectedResponse = {
'other-filter': {
aggs: undefined,
@@ -267,54 +315,84 @@ describe('Terms Agg Other bucket helper', () => {
},
},
};
-
- expect(agg).to.eql(expectedResponse);
+ expect(agg).toBeDefined();
+ if (agg) {
+ expect(agg()).toEqual(expectedResponse);
+ }
});
- it('returns false when nested terms agg has no buckets', () => {
- init(visConfigNestedTerm);
- const agg = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[1], nestedTermResponseNoResults);
- expect(agg).to.eql(false);
+ test('returns false when nested terms agg has no buckets', () => {
+ const aggConfigs = getAggConfigs(nestedTerm.aggs);
+ const agg = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[1] as IBucketAggConfig,
+ nestedTermResponseNoResults
+ );
+
+ expect(agg).toEqual(false);
});
});
describe('mergeOtherBucketAggResponse', () => {
- it('correctly merges other bucket with single terms agg', () => {
- init(visConfigSingleTerm);
- const otherAggConfig = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[0], singleTermResponse)();
- const mergedResponse = mergeOtherBucketAggResponse(
- vis.aggs,
- singleTermResponse,
- singleOtherResponse,
- vis.aggs.aggs[0],
- otherAggConfig
+ test('correctly merges other bucket with single terms agg', () => {
+ const aggConfigs = getAggConfigs(singleTerm.aggs);
+ const otherAggConfig = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[0] as IBucketAggConfig,
+ singleTermResponse
);
- expect(mergedResponse.aggregations['1'].buckets[3].key).to.equal('__other__');
+ expect(otherAggConfig).toBeDefined();
+ if (otherAggConfig) {
+ const mergedResponse = mergeOtherBucketAggResponse(
+ aggConfigs,
+ singleTermResponse,
+ singleOtherResponse,
+ aggConfigs.aggs[0] as IBucketAggConfig,
+ otherAggConfig()
+ );
+ expect(mergedResponse.aggregations['1'].buckets[3].key).toEqual('__other__');
+ }
});
- it('correctly merges other bucket with nested terms agg', () => {
- init(visConfigNestedTerm);
- const otherAggConfig = buildOtherBucketAgg(vis.aggs, vis.aggs.aggs[1], nestedTermResponse)();
- const mergedResponse = mergeOtherBucketAggResponse(
- vis.aggs,
- nestedTermResponse,
- nestedOtherResponse,
- vis.aggs.aggs[1],
- otherAggConfig
+ test('correctly merges other bucket with nested terms agg', () => {
+ const aggConfigs = getAggConfigs(nestedTerm.aggs);
+ const otherAggConfig = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[1] as IBucketAggConfig,
+ nestedTermResponse
);
- expect(mergedResponse.aggregations['1'].buckets[1]['2'].buckets[3].key).to.equal('__other__');
+ expect(otherAggConfig).toBeDefined();
+ if (otherAggConfig) {
+ const mergedResponse = mergeOtherBucketAggResponse(
+ aggConfigs,
+ nestedTermResponse,
+ nestedOtherResponse,
+ aggConfigs.aggs[1] as IBucketAggConfig,
+ otherAggConfig()
+ );
+
+ expect(mergedResponse.aggregations['1'].buckets[1]['2'].buckets[3].key).toEqual(
+ '__other__'
+ );
+ }
});
});
describe('updateMissingBucket', () => {
- it('correctly updates missing bucket key', () => {
- init(visConfigNestedTerm);
- const updatedResponse = updateMissingBucket(singleTermResponse, vis.aggs, vis.aggs.aggs[0]);
+ test('correctly updates missing bucket key', () => {
+ const aggConfigs = getAggConfigs(nestedTerm.aggs);
+ const updatedResponse = updateMissingBucket(
+ singleTermResponse,
+ aggConfigs,
+ aggConfigs.aggs[0] as IBucketAggConfig
+ );
expect(
- updatedResponse.aggregations['1'].buckets.find(bucket => bucket.key === '__missing__')
- ).to.not.be('undefined');
+ updatedResponse.aggregations['1'].buckets.find(
+ (bucket: Record) => bucket.key === '__missing__'
+ )
+ ).toBeDefined();
});
});
});
diff --git a/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.js b/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
similarity index 65%
rename from src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.js
rename to src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
index ddab360161744..42db37c81eadd 100644
--- a/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.js
+++ b/src/legacy/core_plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
@@ -17,21 +17,24 @@
* under the License.
*/
-import _ from 'lodash';
+import { isNumber, keys, values, find, each, cloneDeep, flatten } from 'lodash';
import { esFilters, esQuery } from '../../../../../../../plugins/data/public';
import { AggGroupNames } from '../agg_groups';
+import { IAggConfigs } from '../agg_configs';
+import { IBucketAggConfig } from './_bucket_agg_type';
/**
* walks the aggregation DSL and returns DSL starting at aggregation with id of startFromAggId
* @param aggNestedDsl: aggregation config DSL (top level)
* @param startFromId: id of an aggregation from where we want to get the nested DSL
*/
-const getNestedAggDSL = (aggNestedDsl, startFromAggId) => {
+const getNestedAggDSL = (aggNestedDsl: Record, startFromAggId: string): any => {
if (aggNestedDsl[startFromAggId]) {
return aggNestedDsl[startFromAggId];
}
- const nestedAggs = _.values(aggNestedDsl);
+ const nestedAggs: Array> = values(aggNestedDsl);
let aggs;
+
for (let i = 0; i < nestedAggs.length; i++) {
if (nestedAggs[i].aggs && (aggs = getNestedAggDSL(nestedAggs[i].aggs, startFromAggId))) {
return aggs;
@@ -46,27 +49,34 @@ const getNestedAggDSL = (aggNestedDsl, startFromAggId) => {
* @param aggWithOtherBucket: AggConfig of the aggregation with other bucket enabled
* @param key: key from the other bucket request for a specific other bucket
*/
-const getAggResultBuckets = (aggConfigs, response, aggWithOtherBucket, key) => {
+const getAggResultBuckets = (
+ aggConfigs: IAggConfigs,
+ response: any,
+ aggWithOtherBucket: IBucketAggConfig,
+ key: string
+) => {
const keyParts = key.split('-');
let responseAgg = response;
for (const i in keyParts) {
if (keyParts[i]) {
- const responseAggs = _.values(responseAgg);
+ const responseAggs: Array> = values(responseAgg);
// If you have multi aggs, we cannot just assume the first one is the `other` bucket,
// so we need to loop over each agg until we find it.
for (let aggId = 0; aggId < responseAggs.length; aggId++) {
- const agg = responseAggs[aggId];
- const aggKey = _.keys(responseAgg)[aggId];
- const aggConfig = _.find(aggConfigs.aggs, agg => agg.id === aggKey);
- const bucket = _.find(agg.buckets, (bucket, bucketObjKey) => {
- const bucketKey = aggConfig
- .getKey(bucket, Number.isInteger(bucketObjKey) ? null : bucketObjKey)
- .toString();
- return bucketKey === keyParts[i];
- });
- if (bucket) {
- responseAgg = bucket;
- break;
+ const aggById = responseAggs[aggId];
+ const aggKey = keys(responseAgg)[aggId];
+ const aggConfig = find(aggConfigs.aggs, agg => agg.id === aggKey);
+ if (aggConfig) {
+ const aggResultBucket = find(aggById.buckets, (bucket, bucketObjKey) => {
+ const bucketKey = aggConfig
+ .getKey(bucket, isNumber(bucketObjKey) ? undefined : bucketObjKey)
+ .toString();
+ return bucketKey === keyParts[i];
+ });
+ if (aggResultBucket) {
+ responseAgg = aggResultBucket;
+ break;
+ }
}
}
}
@@ -82,21 +92,20 @@ const getAggResultBuckets = (aggConfigs, response, aggWithOtherBucket, key) => {
* @param responseAggs: array of aggregations from response
* @param aggId: id of the aggregation with missing bucket
*/
-const getAggConfigResultMissingBuckets = (responseAggs, aggId) => {
+const getAggConfigResultMissingBuckets = (responseAggs: any, aggId: string) => {
const missingKey = '__missing__';
- let resultBuckets = [];
+ let resultBuckets: Array> = [];
if (responseAggs[aggId]) {
- const matchingBucket = responseAggs[aggId].buckets.find(bucket => bucket.key === missingKey);
+ const matchingBucket = responseAggs[aggId].buckets.find(
+ (bucket: Record) => bucket.key === missingKey
+ );
if (matchingBucket) resultBuckets.push(matchingBucket);
return resultBuckets;
}
- _.each(responseAggs, agg => {
+ each(responseAggs, agg => {
if (agg.buckets) {
- _.each(agg.buckets, bucket => {
- resultBuckets = [
- ...resultBuckets,
- ...getAggConfigResultMissingBuckets(bucket, aggId, missingKey),
- ];
+ each(agg.buckets, bucket => {
+ resultBuckets = [...resultBuckets, ...getAggConfigResultMissingBuckets(bucket, aggId)];
});
}
});
@@ -110,13 +119,24 @@ const getAggConfigResultMissingBuckets = (responseAggs, aggId) => {
* @param key: the key for this specific other bucket
* @param otherAgg: AggConfig of the aggregation with other bucket
*/
-const getOtherAggTerms = (requestAgg, key, otherAgg) => {
+const getOtherAggTerms = (
+ requestAgg: Record,
+ key: string,
+ otherAgg: IBucketAggConfig
+) => {
return requestAgg['other-filter'].filters.filters[key].bool.must_not
- .filter(filter => filter.match_phrase && filter.match_phrase[otherAgg.params.field.name])
- .map(filter => filter.match_phrase[otherAgg.params.field.name]);
+ .filter(
+ (filter: Record) =>
+ filter.match_phrase && filter.match_phrase[otherAgg.params.field.name]
+ )
+ .map((filter: Record) => filter.match_phrase[otherAgg.params.field.name]);
};
-export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) => {
+export const buildOtherBucketAgg = (
+ aggConfigs: IAggConfigs,
+ aggWithOtherBucket: IBucketAggConfig,
+ response: any
+) => {
const bucketAggs = aggConfigs.aggs.filter(agg => agg.type.type === AggGroupNames.Buckets);
const index = bucketAggs.findIndex(agg => agg.id === aggWithOtherBucket.id);
const aggs = aggConfigs.toDsl();
@@ -130,6 +150,7 @@ export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) =>
params: {
filters: [],
},
+ enabled: false,
},
{
addToAggConfigs: false,
@@ -145,25 +166,31 @@ export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) =>
let noAggBucketResults = false;
// recursively create filters for all parent aggregation buckets
- const walkBucketTree = (aggIndex, aggs, aggId, filters, key) => {
+ const walkBucketTree = (
+ aggIndex: number,
+ aggregations: any,
+ aggId: string,
+ filters: any[],
+ key: string
+ ) => {
// make sure there are actually results for the buckets
- if (aggs[aggId].buckets.length < 1) {
+ if (aggregations[aggId].buckets.length < 1) {
noAggBucketResults = true;
return;
}
- const agg = aggs[aggId];
+ const agg = aggregations[aggId];
const newAggIndex = aggIndex + 1;
const newAgg = bucketAggs[newAggIndex];
const currentAgg = bucketAggs[aggIndex];
if (aggIndex < index) {
- _.each(agg.buckets, (bucket, bucketObjKey) => {
+ each(agg.buckets, (bucket: any, bucketObjKey) => {
const bucketKey = currentAgg.getKey(
bucket,
- Number.isInteger(bucketObjKey) ? null : bucketObjKey
+ isNumber(bucketObjKey) ? undefined : bucketObjKey
);
- const filter = _.cloneDeep(bucket.filters) || currentAgg.createFilter(bucketKey);
- const newFilters = _.flatten([...filters, filter]);
+ const filter = cloneDeep(bucket.filters) || currentAgg.createFilter(bucketKey);
+ const newFilters = flatten([...filters, filter]);
walkBucketTree(
newAggIndex,
bucket,
@@ -177,7 +204,7 @@ export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) =>
if (
!aggWithOtherBucket.params.missingBucket ||
- agg.buckets.some(bucket => bucket.key === '__missing__')
+ agg.buckets.some((bucket: { key: string }) => bucket.key === '__missing__')
) {
filters.push(
esFilters.buildExistsFilter(
@@ -188,7 +215,7 @@ export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) =>
}
// create not filters for all the buckets
- _.each(agg.buckets, bucket => {
+ each(agg.buckets, bucket => {
if (bucket.key === '__missing__') return;
const filter = currentAgg.createFilter(bucket.key);
filter.meta.negate = true;
@@ -214,15 +241,15 @@ export const buildOtherBucketAgg = (aggConfigs, aggWithOtherBucket, response) =>
};
export const mergeOtherBucketAggResponse = (
- aggsConfig,
- response,
- otherResponse,
- otherAgg,
- requestAgg
+ aggsConfig: IAggConfigs,
+ response: any,
+ otherResponse: any,
+ otherAgg: IBucketAggConfig,
+ requestAgg: Record
) => {
- const updatedResponse = _.cloneDeep(response);
- _.each(otherResponse.aggregations['other-filter'].buckets, (bucket, key) => {
- if (!bucket.doc_count) return;
+ const updatedResponse = cloneDeep(response);
+ each(otherResponse.aggregations['other-filter'].buckets, (bucket, key) => {
+ if (!bucket.doc_count || key === undefined) return;
const bucketKey = key.replace(/^-/, '');
const aggResultBuckets = getAggResultBuckets(
aggsConfig,
@@ -241,7 +268,11 @@ export const mergeOtherBucketAggResponse = (
bucket.filters = [phraseFilter];
bucket.key = '__other__';
- if (aggResultBuckets.some(bucket => bucket.key === '__missing__')) {
+ if (
+ aggResultBuckets.some(
+ (aggResultBucket: Record) => aggResultBucket.key === '__missing__'
+ )
+ ) {
bucket.filters.push(
esFilters.buildExistsFilter(otherAgg.params.field, otherAgg.params.field.indexPattern)
);
@@ -251,8 +282,12 @@ export const mergeOtherBucketAggResponse = (
return updatedResponse;
};
-export const updateMissingBucket = (response, aggConfigs, agg) => {
- const updatedResponse = _.cloneDeep(response);
+export const updateMissingBucket = (
+ response: any,
+ aggConfigs: IAggConfigs,
+ agg: IBucketAggConfig
+) => {
+ const updatedResponse = cloneDeep(response);
const aggResultBuckets = getAggConfigResultMissingBuckets(updatedResponse.aggregations, agg.id);
aggResultBuckets.forEach(bucket => {
bucket.key = '__missing__';
diff --git a/src/legacy/core_plugins/data/public/search/aggs/buckets/terms.ts b/src/legacy/core_plugins/data/public/search/aggs/buckets/terms.ts
index 0ed44aa876744..8fd95c86d8476 100644
--- a/src/legacy/core_plugins/data/public/search/aggs/buckets/terms.ts
+++ b/src/legacy/core_plugins/data/public/search/aggs/buckets/terms.ts
@@ -39,7 +39,6 @@ import {
buildOtherBucketAgg,
mergeOtherBucketAggResponse,
updateMissingBucket,
- // @ts-ignore
} from './_terms_other_bucket_helper';
import { Schemas } from '../schemas';
import { AggGroupNames } from '../agg_groups';
diff --git a/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts b/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
index e85e9deff6ddf..bd05fa21bfd5d 100644
--- a/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
+++ b/src/legacy/core_plugins/data/public/search/expressions/build_tabular_inspector_data.ts
@@ -20,7 +20,7 @@
import { set } from 'lodash';
// @ts-ignore
import { FormattedData } from '../../../../../../plugins/inspector/public';
-// @ts-ignore
+
import { createFilter } from './create_filter';
import { TabbedTable } from '../tabify';
@@ -66,7 +66,10 @@ export async function buildTabularInspectorData(
row => row[`col-${colIndex}-${col.aggConfig.id}`].raw === value.raw
);
const filter = createFilter(aggConfigs, table, colIndex, rowIndex, value.raw);
- queryFilter.addFilters(filter);
+
+ if (filter) {
+ queryFilter.addFilters(filter);
+ }
}),
filterOut:
isCellContentFilterable &&
@@ -75,14 +78,17 @@ export async function buildTabularInspectorData(
row => row[`col-${colIndex}-${col.aggConfig.id}`].raw === value.raw
);
const filter = createFilter(aggConfigs, table, colIndex, rowIndex, value.raw);
- const notOther = value.raw !== '__other__';
- const notMissing = value.raw !== '__missing__';
- if (Array.isArray(filter)) {
- filter.forEach(f => set(f, 'meta.negate', notOther && notMissing));
- } else {
- set(filter, 'meta.negate', notOther && notMissing);
+
+ if (filter) {
+ const notOther = value.raw !== '__other__';
+ const notMissing = value.raw !== '__missing__';
+ if (Array.isArray(filter)) {
+ filter.forEach(f => set(f, 'meta.negate', notOther && notMissing));
+ } else {
+ set(filter, 'meta.negate', notOther && notMissing);
+ }
+ queryFilter.addFilters(filter);
}
- queryFilter.addFilters(filter);
}),
};
});
diff --git a/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts b/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts
new file mode 100644
index 0000000000000..890ec81778d4b
--- /dev/null
+++ b/src/legacy/core_plugins/data/public/search/expressions/create_filter.test.ts
@@ -0,0 +1,130 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import {
+ fieldFormats,
+ FieldFormatsGetConfigFn,
+ esFilters,
+} from '../../../../../../plugins/data/public';
+import { createFilter } from './create_filter';
+import { TabbedTable } from '../tabify';
+import { AggConfigs } from '../aggs/agg_configs';
+import { IAggConfig } from '../aggs/agg_config';
+import { mockDataServices, mockAggTypesRegistry } from '../aggs/test_helpers';
+
+describe('createFilter', () => {
+ let table: TabbedTable;
+ let aggConfig: IAggConfig;
+
+ const typesRegistry = mockAggTypesRegistry();
+
+ const getAggConfigs = (type: string, params: any) => {
+ const field = {
+ name: 'bytes',
+ filterable: true,
+ indexPattern: {
+ id: '1234',
+ },
+ format: new fieldFormats.BytesFormat({}, (() => {}) as FieldFormatsGetConfigFn),
+ };
+
+ const indexPattern = {
+ id: '1234',
+ title: 'logstash-*',
+ fields: {
+ getByName: () => field,
+ filter: () => [field],
+ },
+ } as any;
+
+ return new AggConfigs(
+ indexPattern,
+ [
+ {
+ id: type,
+ type,
+ schema: 'buckets',
+ params,
+ },
+ ],
+ { typesRegistry }
+ );
+ };
+
+ const aggConfigParams: Record = {
+ field: 'bytes',
+ interval: 30,
+ otherBucket: true,
+ };
+
+ beforeEach(() => {
+ table = {
+ columns: [
+ {
+ id: '1-1',
+ name: 'test',
+ aggConfig,
+ },
+ ],
+ rows: [
+ {
+ '1-1': '2048',
+ },
+ ],
+ };
+ mockDataServices();
+ });
+
+ test('ignores event when cell value is not provided', async () => {
+ aggConfig = getAggConfigs('histogram', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, -1, null);
+
+ expect(filters).not.toBeDefined();
+ });
+
+ test('handles an event when aggregations type is a terms', async () => {
+ aggConfig = getAggConfigs('terms', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, 0, 'test');
+
+ expect(filters).toBeDefined();
+
+ if (filters) {
+ expect(filters.length).toEqual(1);
+ expect(filters[0].query.match_phrase.bytes).toEqual('2048');
+ }
+ });
+
+ test('handles an event when aggregations type is not terms', async () => {
+ aggConfig = getAggConfigs('histogram', aggConfigParams).aggs[0];
+ const filters = await createFilter([aggConfig], table, 0, 0, 'test');
+
+ expect(filters).toBeDefined();
+
+ if (filters) {
+ expect(filters.length).toEqual(1);
+
+ const [rangeFilter] = filters;
+
+ if (esFilters.isRangeFilter(rangeFilter)) {
+ expect(rangeFilter.range.bytes.gte).toEqual(2048);
+ expect(rangeFilter.range.bytes.lt).toEqual(2078);
+ }
+ }
+ });
+});
diff --git a/src/legacy/core_plugins/data/public/search/expressions/create_filter.js b/src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
similarity index 78%
rename from src/legacy/core_plugins/data/public/search/expressions/create_filter.js
rename to src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
index 3f4028a9b5525..77e011932195c 100644
--- a/src/legacy/core_plugins/data/public/search/expressions/create_filter.js
+++ b/src/legacy/core_plugins/data/public/search/expressions/create_filter.ts
@@ -17,7 +17,11 @@
* under the License.
*/
-const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
+import { IAggConfig } from 'ui/agg_types';
+import { Filter } from '../../../../../../plugins/data/public';
+import { TabbedTable } from '../tabify';
+
+const getOtherBucketFilterTerms = (table: TabbedTable, columnIndex: number, rowIndex: number) => {
if (rowIndex === -1) {
return [];
}
@@ -41,11 +45,17 @@ const getOtherBucketFilterTerms = (table, columnIndex, rowIndex) => {
];
};
-const createFilter = (aggConfigs, table, columnIndex, rowIndex, cellValue) => {
+const createFilter = (
+ aggConfigs: IAggConfig[],
+ table: TabbedTable,
+ columnIndex: number,
+ rowIndex: number,
+ cellValue: any
+) => {
const column = table.columns[columnIndex];
const aggConfig = aggConfigs[columnIndex];
- let filter = [];
- const value = rowIndex > -1 ? table.rows[rowIndex][column.id] : cellValue;
+ let filter: Filter[] = [];
+ const value: any = rowIndex > -1 ? table.rows[rowIndex][column.id] : cellValue;
if (value === null || value === undefined || !aggConfig.isFilterable()) {
return;
}
@@ -56,6 +66,10 @@ const createFilter = (aggConfigs, table, columnIndex, rowIndex, cellValue) => {
filter = aggConfig.createFilter(value);
}
+ if (!filter) {
+ return;
+ }
+
if (!Array.isArray(filter)) {
filter = [filter];
}
diff --git a/src/legacy/core_plugins/input_control_vis/public/components/vis/__snapshots__/list_control.test.tsx.snap b/src/legacy/core_plugins/input_control_vis/public/components/vis/__snapshots__/list_control.test.tsx.snap
index 99482a4be2d7b..59ae99260cecd 100644
--- a/src/legacy/core_plugins/input_control_vis/public/components/vis/__snapshots__/list_control.test.tsx.snap
+++ b/src/legacy/core_plugins/input_control_vis/public/components/vis/__snapshots__/list_control.test.tsx.snap
@@ -25,6 +25,7 @@ exports[`renders ListControl 1`] = `
compressed={false}
data-test-subj="listControlSelect0"
fullWidth={false}
+ inputRef={[Function]}
isClearable={true}
isLoading={false}
onChange={[Function]}
diff --git a/src/legacy/core_plugins/input_control_vis/public/components/vis/list_control.tsx b/src/legacy/core_plugins/input_control_vis/public/components/vis/list_control.tsx
index d62adfdce56b4..d01cef15ea41b 100644
--- a/src/legacy/core_plugins/input_control_vis/public/components/vis/list_control.tsx
+++ b/src/legacy/core_plugins/input_control_vis/public/components/vis/list_control.tsx
@@ -58,8 +58,17 @@ class ListControlUi extends PureComponent {
+ if (this.textInput) {
+ this.textInput.setAttribute('focusable', 'false'); // remove when #59039 is fixed
+ }
this.isMounted = true;
};
@@ -67,6 +76,10 @@ class ListControlUi extends PureComponent {
+ this.textInput = ref;
+ };
+
handleOnChange = (selectedOptions: any[]) => {
const selectedValues = selectedOptions.map(({ value }) => {
return value;
@@ -143,6 +156,7 @@ class ListControlUi extends PureComponent
);
}
diff --git a/src/legacy/core_plugins/kibana/public/.eslintrc.js b/src/legacy/core_plugins/kibana/public/.eslintrc.js
index b3ee0a8fa7b04..e7171a5291d26 100644
--- a/src/legacy/core_plugins/kibana/public/.eslintrc.js
+++ b/src/legacy/core_plugins/kibana/public/.eslintrc.js
@@ -77,7 +77,7 @@ module.exports = {
{
basePath: path.resolve(__dirname, '../../../../../'),
zones: topLevelRestricedZones.concat(
- buildRestrictedPaths(['visualize', 'discover', 'dashboard', 'devTools', 'home'])
+ buildRestrictedPaths(['visualize', 'discover', 'dashboard', 'devTools'])
),
},
],
diff --git a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/clone_modal.test.js.snap b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/clone_modal.test.js.snap
index f5a00e5435ed6..771d53b73d960 100644
--- a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/clone_modal.test.js.snap
+++ b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/__snapshots__/clone_modal.test.js.snap
@@ -28,6 +28,7 @@ exports[`renders DashboardCloneModal 1`] = `
}
showCopyOnSave={true}
+ showDescription={false}
title="dash title"
/>
`;
diff --git a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/clone_modal.tsx b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/clone_modal.tsx
index e5e75e4b7d277..08e2b98d1c73d 100644
--- a/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/clone_modal.tsx
+++ b/src/legacy/core_plugins/kibana/public/dashboard/np_ready/top_nav/clone_modal.tsx
@@ -178,6 +178,9 @@ export class DashboardCloneModal extends React.Component {
{
showCopyOnSave={this.props.showCopyOnSave}
objectType="dashboard"
options={this.renderDashboardSaveOptions()}
+ showDescription={false}
/>
);
}
diff --git a/src/legacy/core_plugins/kibana/public/discover/build_services.ts b/src/legacy/core_plugins/kibana/public/discover/build_services.ts
index 6b0d2368cc1a2..c58307adaf38c 100644
--- a/src/legacy/core_plugins/kibana/public/discover/build_services.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/build_services.ts
@@ -33,11 +33,10 @@ import {
import { DiscoverStartPlugins } from './plugin';
import { SharePluginStart } from '../../../../../plugins/share/public';
-import { SavedSearch } from './np_ready/types';
import { DocViewsRegistry } from './np_ready/doc_views/doc_views_registry';
import { ChartsPluginStart } from '../../../../../plugins/charts/public';
import { VisualizationsStart } from '../../../visualizations/public';
-import { createSavedSearchesLoader } from '../../../../../plugins/discover/public';
+import { createSavedSearchesLoader, SavedSearch } from '../../../../../plugins/discover/public';
export interface DiscoverServices {
addBasePath: (path: string) => string;
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/context_state.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/context_state.ts
index 8fb6140d55e31..bf185f78941de 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/context_state.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/context_state.ts
@@ -24,9 +24,9 @@ import {
syncStates,
BaseStateContainer,
} from '../../../../../../../plugins/kibana_utils/public';
-import { esFilters, FilterManager, Filter } from '../../../../../../../plugins/data/public';
+import { esFilters, FilterManager, Filter, Query } from '../../../../../../../plugins/data/public';
-interface AppState {
+export interface AppState {
/**
* Columns displayed in the table, cannot be changed by UI, just in discover's main app
*/
@@ -47,6 +47,7 @@ interface AppState {
* Number of records to be fetched after the anchor records (older records)
*/
successorCount: number;
+ query?: Query;
}
interface GlobalState {
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
index 1ac54ad5dabee..bb693ab860221 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover.js
@@ -305,6 +305,7 @@ function discoverController(
defaultMessage:
'Save your Discover search so you can use it in visualizations and dashboards',
})}
+ showDescription={false}
/>
);
showSaveModal(saveModal, core.i18n.Context);
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/__snapshots__/tool_bar_pager_buttons.test.tsx.snap b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/__snapshots__/tool_bar_pager_buttons.test.tsx.snap
index a6aab8f74a674..20e503fd5ff91 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/__snapshots__/tool_bar_pager_buttons.test.tsx.snap
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/doc_table/components/pager/__snapshots__/tool_bar_pager_buttons.test.tsx.snap
@@ -5,6 +5,7 @@ exports[`it renders ToolBarPagerButtons 1`] = `
className="kuiButtonGroup"
>
@@ -41,6 +48,12 @@ export function ToolBarPagerButtons(props: Props) {
onClick={() => props.onPageNext()}
disabled={!props.hasNextPage}
data-test-subj="btnNextPage"
+ aria-label={i18n.translate(
+ 'kbn.discover.docTable.pager.toolbarPagerButtons.nextButtonAriaLabel',
+ {
+ defaultMessage: 'Next page in table',
+ }
+ )}
>
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
index 2bbeea9d675c7..100d9cdac133b 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/get_painless_error.ts
@@ -23,9 +23,9 @@ import { get } from 'lodash';
export function getPainlessError(error: Error) {
const rootCause: Array<{ lang: string; script: string }> | undefined = get(
error,
- 'resp.error.root_cause'
+ 'body.attributes.error.root_cause'
);
- const message: string = get(error, 'message');
+ const message: string = get(error, 'body.message');
if (!rootCause) {
return;
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/field_chooser.js b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/field_chooser.js
index a175a1aebebdf..df970ab5f2584 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/field_chooser.js
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/field_chooser.js
@@ -24,7 +24,11 @@ import './discover_field';
import './discover_field_search_directive';
import './discover_index_pattern_directive';
import fieldChooserTemplate from './field_chooser.html';
-import { IndexPatternFieldList } from '../../../../../../../../plugins/data/public';
+import {
+ IndexPatternFieldList,
+ KBN_FIELD_TYPES,
+} from '../../../../../../../../plugins/data/public';
+import { getMapsAppUrl, isFieldVisualizable, isMapsAppRegistered } from './lib/visualize_url_utils';
export function createFieldChooserDirective($location, config, $route) {
return {
@@ -186,8 +190,15 @@ export function createFieldChooserDirective($location, config, $route) {
return '';
}
+ if (
+ (field.type === KBN_FIELD_TYPES.GEO_POINT || field.type === KBN_FIELD_TYPES.GEO_SHAPE) &&
+ isMapsAppRegistered()
+ ) {
+ return getMapsAppUrl(field, $scope.indexPattern, $scope.state, $scope.columns);
+ }
+
let agg = {};
- const isGeoPoint = field.type === 'geo_point';
+ const isGeoPoint = field.type === KBN_FIELD_TYPES.GEO_POINT;
const type = isGeoPoint ? 'tile_map' : 'histogram';
// If we're visualizing a date field, and our index is time based (and thus has a time filter),
// then run a date histogram
@@ -243,7 +254,7 @@ export function createFieldChooserDirective($location, config, $route) {
$scope.computeDetails = function(field, recompute) {
if (_.isUndefined(field.details) || recompute) {
field.details = {
- visualizeUrl: field.visualizable ? getVisualizeUrl(field) : null,
+ visualizeUrl: isFieldVisualizable(field) ? getVisualizeUrl(field) : null,
...fieldCalculator.getFieldValueCounts({
hits: $scope.hits,
field: field,
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/detail_views/string.html b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/detail_views/string.html
index 5d134911fc91b..333dc472e956d 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/detail_views/string.html
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/detail_views/string.html
@@ -79,7 +79,7 @@
@@ -87,7 +87,7 @@
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/visualize_url_utils.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/visualize_url_utils.ts
new file mode 100644
index 0000000000000..8dbf3cd79ccb1
--- /dev/null
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/components/field_chooser/lib/visualize_url_utils.ts
@@ -0,0 +1,108 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import uuid from 'uuid/v4';
+// @ts-ignore
+import rison from 'rison-node';
+import {
+ IFieldType,
+ IIndexPattern,
+ KBN_FIELD_TYPES,
+} from '../../../../../../../../../plugins/data/public';
+import { AppState } from '../../../angular/context_state';
+import { getServices } from '../../../../kibana_services';
+
+function getMapsAppBaseUrl() {
+ const mapsAppVisAlias = getServices()
+ .visualizations.types.getAliases()
+ .find(({ name }) => {
+ return name === 'maps';
+ });
+ return mapsAppVisAlias ? mapsAppVisAlias.aliasUrl : null;
+}
+
+export function isMapsAppRegistered() {
+ return getServices()
+ .visualizations.types.getAliases()
+ .some(({ name }) => {
+ return name === 'maps';
+ });
+}
+
+export function isFieldVisualizable(field: IFieldType) {
+ if (
+ (field.type === KBN_FIELD_TYPES.GEO_POINT || field.type === KBN_FIELD_TYPES.GEO_SHAPE) &&
+ isMapsAppRegistered()
+ ) {
+ return true;
+ }
+ return field.visualizable;
+}
+
+export function getMapsAppUrl(
+ field: IFieldType,
+ indexPattern: IIndexPattern,
+ appState: AppState,
+ columns: string[]
+) {
+ const mapAppParams = new URLSearchParams();
+
+ // Copy global state
+ const locationSplit = window.location.href.split('discover?');
+ if (locationSplit.length > 1) {
+ const discoverParams = new URLSearchParams(locationSplit[1]);
+ const globalStateUrlValue = discoverParams.get('_g');
+ if (globalStateUrlValue) {
+ mapAppParams.set('_g', globalStateUrlValue);
+ }
+ }
+
+ // Copy filters and query in app state
+ const mapsAppState: any = {
+ filters: appState.filters || [],
+ };
+ if (appState.query) {
+ mapsAppState.query = appState.query;
+ }
+ // @ts-ignore
+ mapAppParams.set('_a', rison.encode(mapsAppState));
+
+ // create initial layer descriptor
+ const hasColumns = columns && columns.length && columns[0] !== '_source';
+ mapAppParams.set(
+ 'initialLayers',
+ // @ts-ignore
+ rison.encode_array([
+ {
+ id: uuid(),
+ label: indexPattern.title,
+ sourceDescriptor: {
+ id: uuid(),
+ type: 'ES_SEARCH',
+ geoField: field.name,
+ tooltipProperties: hasColumns ? columns : [],
+ indexPatternId: indexPattern.id,
+ },
+ visible: true,
+ type: 'VECTOR',
+ },
+ ])
+ );
+
+ return getServices().addBasePath(`${getMapsAppBaseUrl()}?${mapAppParams.toString()}`);
+}
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
index 738a74d93449d..0aaf3e7f156c1 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/search_embeddable.ts
@@ -37,7 +37,6 @@ import {
Embeddable,
} from '../../../../../embeddable_api/public/np_ready/public';
import * as columnActions from '../angular/doc_table/actions/columns';
-import { SavedSearch } from '../types';
import searchTemplate from './search_template.html';
import { ISearchEmbeddable, SearchInput, SearchOutput } from './types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
@@ -51,6 +50,7 @@ import {
ISearchSource,
} from '../../kibana_services';
import { SEARCH_EMBEDDABLE_TYPE } from './constants';
+import { SavedSearch } from '../../../../../../../plugins/discover/public';
interface SearchScope extends ng.IScope {
columns?: string[];
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
index e7aa390cda858..b20e9b2faf7c4 100644
--- a/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/embeddable/types.ts
@@ -18,9 +18,9 @@
*/
import { EmbeddableInput, EmbeddableOutput, IEmbeddable } from 'src/plugins/embeddable/public';
-import { SavedSearch } from '../types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
import { Filter, IIndexPattern, TimeRange, Query } from '../../../../../../../plugins/data/public';
+import { SavedSearch } from '../../../../../../../plugins/discover/public';
export interface SearchInput extends EmbeddableInput {
timeRange: TimeRange;
diff --git a/src/legacy/core_plugins/kibana/public/home/_index.scss b/src/legacy/core_plugins/kibana/public/home/_index.scss
deleted file mode 100644
index f42254c1096ce..0000000000000
--- a/src/legacy/core_plugins/kibana/public/home/_index.scss
+++ /dev/null
@@ -1 +0,0 @@
-@import 'np_ready/components/index';
diff --git a/src/legacy/core_plugins/kibana/public/home/plugin.ts b/src/legacy/core_plugins/kibana/public/home/plugin.ts
deleted file mode 100644
index f8c750cc80283..0000000000000
--- a/src/legacy/core_plugins/kibana/public/home/plugin.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import {
- AppMountParameters,
- CoreSetup,
- CoreStart,
- Plugin,
- PluginInitializerContext,
-} from 'kibana/public';
-
-import { DataPublicPluginStart } from 'src/plugins/data/public';
-import { TelemetryPluginStart } from 'src/plugins/telemetry/public';
-import { setServices } from './kibana_services';
-import { KibanaLegacySetup } from '../../../../../plugins/kibana_legacy/public';
-import { UsageCollectionSetup } from '../../../../../plugins/usage_collection/public';
-import {
- Environment,
- HomePublicPluginStart,
- HomePublicPluginSetup,
-} from '../../../../../plugins/home/public';
-
-export interface HomePluginStartDependencies {
- data: DataPublicPluginStart;
- home: HomePublicPluginStart;
- telemetry?: TelemetryPluginStart;
-}
-
-export interface HomePluginSetupDependencies {
- usageCollection: UsageCollectionSetup;
- kibanaLegacy: KibanaLegacySetup;
- home: HomePublicPluginSetup;
-}
-
-export class HomePlugin implements Plugin {
- private dataStart: DataPublicPluginStart | null = null;
- private savedObjectsClient: any = null;
- private environment: Environment | null = null;
- private featureCatalogue: HomePublicPluginStart['featureCatalogue'] | null = null;
- private telemetry?: TelemetryPluginStart;
-
- constructor(private initializerContext: PluginInitializerContext) {}
-
- setup(
- core: CoreSetup,
- { home, kibanaLegacy, usageCollection }: HomePluginSetupDependencies
- ) {
- kibanaLegacy.registerLegacyApp({
- id: 'home',
- title: 'Home',
- mount: async (params: AppMountParameters) => {
- const trackUiMetric = usageCollection.reportUiStats.bind(usageCollection, 'Kibana_home');
- const [coreStart, { home: homeStart }] = await core.getStartServices();
- setServices({
- trackUiMetric,
- kibanaVersion: this.initializerContext.env.packageInfo.version,
- http: coreStart.http,
- toastNotifications: core.notifications.toasts,
- banners: coreStart.overlays.banners,
- docLinks: coreStart.docLinks,
- savedObjectsClient: this.savedObjectsClient!,
- chrome: coreStart.chrome,
- telemetry: this.telemetry,
- uiSettings: core.uiSettings,
- addBasePath: core.http.basePath.prepend,
- getBasePath: core.http.basePath.get,
- indexPatternService: this.dataStart!.indexPatterns,
- environment: this.environment!,
- config: kibanaLegacy.config,
- homeConfig: home.config,
- tutorialVariables: homeStart.tutorials.get,
- featureCatalogue: this.featureCatalogue!,
- });
- const { renderApp } = await import('./np_ready/application');
- return await renderApp(params.element);
- },
- });
- }
-
- start(core: CoreStart, { data, home, telemetry }: HomePluginStartDependencies) {
- this.environment = home.environment.get();
- this.featureCatalogue = home.featureCatalogue;
- this.dataStart = data;
- this.telemetry = telemetry;
- this.savedObjectsClient = core.savedObjects.client;
- }
-
- stop() {}
-}
diff --git a/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png b/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png
new file mode 100644
index 0000000000000..cc6ef0ce509eb
Binary files /dev/null and b/src/legacy/core_plugins/kibana/public/home/tutorial_resources/redisenterprise_metrics/screenshot.png differ
diff --git a/src/legacy/core_plugins/kibana/public/index.scss b/src/legacy/core_plugins/kibana/public/index.scss
index 3eef84c32db79..547f44652cf2b 100644
--- a/src/legacy/core_plugins/kibana/public/index.scss
+++ b/src/legacy/core_plugins/kibana/public/index.scss
@@ -13,15 +13,15 @@
// Discover styles
@import 'discover/index';
-// Home styles
-@import './home/index';
-
// Visualize styles
@import './visualize/index';
// Has to come after visualize because of some
// bad cascading in the Editor layout
@import 'src/legacy/ui/public/vis/index';
+// Home styles
+@import '../../../../plugins/home/public/application/index';
+
// Management styles
@import './management/index';
diff --git a/src/legacy/core_plugins/kibana/public/kibana.js b/src/legacy/core_plugins/kibana/public/kibana.js
index a83d1176a7197..04eaf2cbe2679 100644
--- a/src/legacy/core_plugins/kibana/public/kibana.js
+++ b/src/legacy/core_plugins/kibana/public/kibana.js
@@ -26,8 +26,6 @@ import { npSetup } from 'ui/new_platform';
// import the uiExports that we want to "use"
import 'uiExports/home';
-import 'uiExports/visTypes';
-
import 'uiExports/visualize';
import 'uiExports/savedObjectTypes';
import 'uiExports/fieldFormatEditors';
@@ -44,7 +42,6 @@ import 'uiExports/shareContextMenuExtensions';
import 'uiExports/interpreter';
import 'ui/autoload/all';
-import './home';
import './discover/legacy';
import './visualize/legacy';
import './dashboard/legacy';
diff --git a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
index 2f48ecc322fea..3a542cacc44be 100644
--- a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
+++ b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/_editor.scss
@@ -22,10 +22,6 @@ a tilemap in an iframe: https://github.com/elastic/kibana/issues/16457 */
}
}
-.visEditor__linkedMessage {
- padding: $euiSizeS;
-}
-
.visEditor__content {
@include flex-parent();
width: 100%;
diff --git a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
index 4979d9dc89a0c..9dbb05ea95b48 100644
--- a/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
+++ b/src/legacy/core_plugins/kibana/public/visualize/np_ready/editor/editor.html
@@ -1,28 +1,4 @@
-
-