,
+ 'security'
+> & {
+ security?: RouteSecurityGetter | RouteSecurity;
+};
diff --git a/packages/core/http/core-http-router-server-internal/src/router.test.ts b/packages/core/http/core-http-router-server-internal/src/router.test.ts
index 18589d5d39d52..65f5b41f91fba 100644
--- a/packages/core/http/core-http-router-server-internal/src/router.test.ts
+++ b/packages/core/http/core-http-router-server-internal/src/router.test.ts
@@ -232,6 +232,47 @@ describe('Router', () => {
);
});
+ it('throws if enabled security config is not valid', () => {
+ const router = new Router('', logger, enhanceWithContext, routerOptions);
+ expect(() =>
+ router.get(
+ {
+ path: '/',
+ validate: false,
+ security: {
+ authz: {
+ requiredPrivileges: [],
+ },
+ },
+ },
+ (context, req, res) => res.ok({})
+ )
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[authz.requiredPrivileges]: array size is [0], but cannot be smaller than [1]"`
+ );
+ });
+
+ it('throws if disabled security config does not provide opt-out reason', () => {
+ const router = new Router('', logger, enhanceWithContext, routerOptions);
+ expect(() =>
+ router.get(
+ {
+ path: '/',
+ validate: false,
+ security: {
+ // @ts-expect-error
+ authz: {
+ enabled: false,
+ },
+ },
+ },
+ (context, req, res) => res.ok({})
+ )
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[authz.reason]: expected value of type [string] but got [undefined]"`
+ );
+ });
+
it('should default `output: "stream" and parse: false` when no body validation is required but not a GET', () => {
const router = new Router('', logger, enhanceWithContext, routerOptions);
router.post({ path: '/', validate: {} }, (context, req, res) => res.ok({}));
diff --git a/packages/core/http/core-http-router-server-internal/src/router.ts b/packages/core/http/core-http-router-server-internal/src/router.ts
index a6f2ccc35f56b..ddfa8980cb8f2 100644
--- a/packages/core/http/core-http-router-server-internal/src/router.ts
+++ b/packages/core/http/core-http-router-server-internal/src/router.ts
@@ -26,9 +26,12 @@ import type {
RequestHandler,
VersionedRouter,
RouteRegistrar,
+ RouteSecurity,
} from '@kbn/core-http-server';
import { isZod } from '@kbn/zod';
import { validBodyOutput, getRequestValidation } from '@kbn/core-http-server';
+import type { RouteSecurityGetter } from '@kbn/core-http-server';
+import type { DeepPartial } from '@kbn/utility-types';
import { RouteValidator } from './validator';
import { CoreVersionedRouter } from './versioned_router';
import { CoreKibanaRequest } from './request';
@@ -38,6 +41,8 @@ import { wrapErrors } from './error_wrapper';
import { Method } from './versioned_router/types';
import { prepareRouteConfigValidation } from './util';
import { stripIllegalHttp2Headers } from './strip_illegal_http2_headers';
+import { validRouteSecurity } from './security_route_config_validator';
+import { InternalRouteConfig } from './route';
export type ContextEnhancer<
P,
@@ -61,7 +66,7 @@ function getRouteFullPath(routerPath: string, routePath: string) {
* undefined.
*/
function routeSchemasFromRouteConfig<P, Q, B>(
-  route: RouteConfig<P, Q, B, typeof routeMethod>,
+  route: InternalRouteConfig<P, Q, B, typeof routeMethod>,
routeMethod: RouteMethod
) {
// The type doesn't allow `validate` to be undefined, but it can still
@@ -93,7 +98,7 @@ function routeSchemasFromRouteConfig
- {i18n.translate('xpack.apm.customEmtpyState.title', {
- defaultMessage: 'Detect and resolve problems with your application',
- })}
-
-
-
- {i18n.translate('xpack.apm.customEmtpyState.description', {
- defaultMessage:
- 'Start collecting data for your applications and services so you can detect and resolve problems faster.',
- })}
-
-
-
-
-
-
- {i18n.translate('xpack.apm.customEmtpyState.title.reader', {
- defaultMessage: 'Add APM data',
- })}
-
-
- }
- description={i18n.translate('xpack.apm.customEmtpyState.card.description', {
- defaultMessage:
- 'Use APM agents to collect APM data. We make it easy with agents for many popular languages.',
- })}
- footer={
-
-
-
- {noDataConfig?.action.elasticAgent.title}
-
-
-
-
- {i18n.translate('xpack.apm.entityEnablement.content', {
- defaultMessage:
- 'Our new experience combines both APM-instrumented services with services detected from logs in a single service inventory.',
- })}
-
-
-
-
-
-
- {i18n.translate('xpack.apm.entityEnablement.footer', {
- defaultMessage: 'Learn more',
- })}
-
-
-
-
-
- )}
- {isEntityCentricExperienceViewEnabled && (
-
-
- {i18n.translate('xpack.apm.eemEnablement.restoreClassicView.', {
- defaultMessage: 'Restore classic view',
- })}
-
-
- )}
-
- setsIsUnauthorizedModalVisible(false)}
- label={label}
- />
-
- );
-}
diff --git a/x-pack/plugins/observability_solution/apm/public/context/entity_manager_context/entity_manager_context.tsx b/x-pack/plugins/observability_solution/apm/public/context/entity_manager_context/entity_manager_context.tsx
index 93205c907caa0..95a246ddce566 100644
--- a/x-pack/plugins/observability_solution/apm/public/context/entity_manager_context/entity_manager_context.tsx
+++ b/x-pack/plugins/observability_solution/apm/public/context/entity_manager_context/entity_manager_context.tsx
@@ -62,11 +62,6 @@ export function EntityManagerEnablementContextProvider({
true
);
- const isEntityCentricExperienceViewEnabled =
- isEntityManagerEnabled &&
- serviceInventoryViewLocalStorageSetting === ServiceInventoryView.entity &&
- isEntityCentricExperienceSettingEnabled;
-
function handleServiceInventoryViewChange(nextView: ServiceInventoryView) {
setServiceInventoryViewLocalStorageSetting(nextView);
// Updates the telemetry context variable every time the user switches views
@@ -88,7 +83,7 @@ export function EntityManagerEnablementContextProvider({
refetch,
serviceInventoryViewLocalStorageSetting,
setServiceInventoryViewLocalStorageSetting: handleServiceInventoryViewChange,
- isEntityCentricExperienceViewEnabled,
+ isEntityCentricExperienceViewEnabled: isEntityCentricExperienceSettingEnabled,
tourState,
updateTourState: handleTourStateUpdate,
}}
diff --git a/x-pack/plugins/observability_solution/apm/tsconfig.json b/x-pack/plugins/observability_solution/apm/tsconfig.json
index 9195c2547a71a..6f3ff13a2af3e 100644
--- a/x-pack/plugins/observability_solution/apm/tsconfig.json
+++ b/x-pack/plugins/observability_solution/apm/tsconfig.json
@@ -120,10 +120,6 @@
"@kbn/test-jest-helpers",
"@kbn/security-plugin-types-common",
"@kbn/entityManager-plugin",
- "@kbn/react-hooks",
- "@kbn/shared-ux-avatar-solution",
- "@kbn/shared-ux-page-no-data-config-types",
- "@kbn/react-hooks",
"@kbn/server-route-repository-utils",
"@kbn/core-analytics-browser",
"@kbn/apm-types",
@@ -131,6 +127,7 @@
"@kbn/serverless",
"@kbn/aiops-log-rate-analysis",
"@kbn/router-utils",
+ "@kbn/react-hooks",
],
"exclude": ["target/**/*"]
}
diff --git a/x-pack/plugins/observability_solution/inventory/public/types.ts b/x-pack/plugins/observability_solution/inventory/public/types.ts
index ed4a500edca68..2393b1b55e2b6 100644
--- a/x-pack/plugins/observability_solution/inventory/public/types.ts
+++ b/x-pack/plugins/observability_solution/inventory/public/types.ts
@@ -13,10 +13,10 @@ import {
EntityManagerPublicPluginStart,
} from '@kbn/entityManager-plugin/public';
import type { InferencePublicStart, InferencePublicSetup } from '@kbn/inference-plugin/public';
+import type { SharePluginSetup, SharePluginStart } from '@kbn/share-plugin/public';
import type { UnifiedSearchPublicPluginStart } from '@kbn/unified-search-plugin/public';
import type { DataViewsPublicPluginStart } from '@kbn/data-views-plugin/public';
import type { DataPublicPluginSetup, DataPublicPluginStart } from '@kbn/data-plugin/public';
-import type { SharePluginStart } from '@kbn/share-plugin/public';
/* eslint-disable @typescript-eslint/no-empty-interface*/
@@ -25,6 +25,7 @@ export interface ConfigSchema {}
export interface InventorySetupDependencies {
observabilityShared: ObservabilitySharedPluginSetup;
inference: InferencePublicSetup;
+ share: SharePluginSetup;
data: DataPublicPluginSetup;
entityManager: EntityManagerPublicPluginSetup;
}
diff --git a/x-pack/plugins/observability_solution/observability_shared/common/index.ts b/x-pack/plugins/observability_solution/observability_shared/common/index.ts
index d845ea1d398fd..d13e2b32839d6 100644
--- a/x-pack/plugins/observability_solution/observability_shared/common/index.ts
+++ b/x-pack/plugins/observability_solution/observability_shared/common/index.ts
@@ -178,6 +178,7 @@ export type {
ServiceEntityLocatorParams,
TransactionDetailsByTraceIdLocator,
TransactionDetailsByTraceIdLocatorParams,
+ EntitiesInventoryLocator,
} from './locators';
export {
@@ -201,6 +202,8 @@ export {
SERVICE_ENTITY_LOCATOR,
TransactionDetailsByTraceIdLocatorDefinition,
TRANSACTION_DETAILS_BY_TRACE_ID_LOCATOR,
+ EntitiesInventoryLocatorDefinition,
+ ENTITIES_INVENTORY_LOCATOR_ID,
} from './locators';
export { COMMON_OBSERVABILITY_GROUPING } from './embeddable_grouping';
diff --git a/x-pack/plugins/observability_solution/observability_shared/common/locators/entity_inventory/entity_inventory_locator.ts b/x-pack/plugins/observability_solution/observability_shared/common/locators/entity_inventory/entity_inventory_locator.ts
new file mode 100644
index 0000000000000..deb820b0d5e0a
--- /dev/null
+++ b/x-pack/plugins/observability_solution/observability_shared/common/locators/entity_inventory/entity_inventory_locator.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import type { SerializableRecord } from '@kbn/utility-types';
+import { LocatorDefinition, LocatorPublic } from '@kbn/share-plugin/common';
+
+export type EntitiesInventoryLocator = LocatorPublic<SerializableRecord>;
+
+export const ENTITIES_INVENTORY_LOCATOR_ID = 'ENTITY_INVENTORY_LOCATOR';
+
+export class EntitiesInventoryLocatorDefinition implements LocatorDefinition<SerializableRecord> {
+ public readonly id = ENTITIES_INVENTORY_LOCATOR_ID;
+
+ public readonly getLocation = async () => {
+ return {
+ app: 'observability',
+ path: `/inventory`,
+ state: {},
+ };
+ };
+}
diff --git a/x-pack/plugins/observability_solution/observability_shared/common/locators/index.ts b/x-pack/plugins/observability_solution/observability_shared/common/locators/index.ts
index 9c5ded4940d5a..34a6ff391d672 100644
--- a/x-pack/plugins/observability_solution/observability_shared/common/locators/index.ts
+++ b/x-pack/plugins/observability_solution/observability_shared/common/locators/index.ts
@@ -17,3 +17,4 @@ export * from './infra/metrics_explorer_locator';
export * from './profiling/flamegraph_locator';
export * from './profiling/stacktraces_locator';
export * from './profiling/topn_functions_locator';
+export * from './entity_inventory/entity_inventory_locator';
diff --git a/x-pack/plugins/observability_solution/observability_shared/public/plugin.ts b/x-pack/plugins/observability_solution/observability_shared/public/plugin.ts
index 7cd63d7be7602..7a131a2686ad0 100644
--- a/x-pack/plugins/observability_solution/observability_shared/public/plugin.ts
+++ b/x-pack/plugins/observability_solution/observability_shared/public/plugin.ts
@@ -45,6 +45,8 @@ import {
type MetricsExplorerLocator,
type ServiceEntityLocator,
type TransactionDetailsByTraceIdLocator,
+ type EntitiesInventoryLocator,
+ EntitiesInventoryLocatorDefinition,
} from '../common';
import { updateGlobalNavigation } from './services/update_global_navigation';
export interface ObservabilitySharedSetup {
@@ -82,6 +84,7 @@ interface ObservabilitySharedLocators {
transactionDetailsByTraceId: TransactionDetailsByTraceIdLocator;
serviceEntity: ServiceEntityLocator;
};
+ entitiesInventory: EntitiesInventoryLocator;
}
export class ObservabilitySharedPlugin implements Plugin {
@@ -159,6 +162,7 @@ export class ObservabilitySharedPlugin implements Plugin {
),
serviceEntity: urlService.locators.create(new ServiceEntityLocatorDefinition()),
},
+ entitiesInventory: urlService.locators.create(new EntitiesInventoryLocatorDefinition()),
};
}
}
From 05926c20c57b7abc69c6c068d5733f29306f73ba Mon Sep 17 00:00:00 2001
From: Ying Mao
Date: Mon, 30 Sep 2024 10:40:02 -0400
Subject: [PATCH 016/107] [Response Ops][Alerting] Use ES client to update rule
SO at end of rule run instead of SO client. (#193341)
Resolves https://github.com/elastic/kibana/issues/192397
## Summary
Updates the alerting task runner's end-of-run rule update to use the ES
client update function for a true partial update, instead of the saved
objects client update function, which performs a GET followed by an update.
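
In practice the end-of-run write becomes a single partial `update` call against the alerting index, with only the allow-listed attributes in the `doc` body. A minimal sketch of that shape (the `updateRulePostRun` wrapper and the loose `attributes` type are illustrative; the index constant, the `alert:` id prefix, and the `ignore: [404]` handling mirror the diff below):

```ts
import type { ElasticsearchClient } from '@kbn/core/server';
import { ALERTING_CASES_SAVED_OBJECT_INDEX } from '@kbn/core-saved-objects-server';

// Sketch: a true partial update of the rule saved object. Only the
// allow-listed, non-AAD attributes are sent in the `doc` body, so no
// prior GET of the full saved object is needed.
async function updateRulePostRun(
  esClient: ElasticsearchClient,
  ruleId: string,
  attributes: Record<string, unknown> // e.g. executionStatus, monitoring, nextRun, running
): Promise<void> {
  await esClient.update(
    {
      id: `alert:${ruleId}`,
      index: ALERTING_CASES_SAVED_OBJECT_INDEX,
      doc: { alert: attributes }, // partial document merge, not a full re-index
      refresh: false,
    },
    { ignore: [404] } // tolerate a rule that was deleted mid-run
  );
}
```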
## To verify
Create rules in multiple spaces and ensure they run correctly and that
their execution status and monitoring history are updated at the end of
each run. Because we're performing a partial update on attributes that
are not included in the AAD, the rules should continue running without
any encryption errors.
## Risk Matrix
| Risk | Probability | Severity | Mitigation/Notes |
|------|-------------|----------|------------------|
| Updating saved object directly using ES client will break BWC | Medium | High | Response Ops follows an intermediate release strategy for any changes to the rule saved object where schema changes are introduced in an intermediate release before any changes to the saved object are actually made in a followup release. This ensures that any rollbacks that may be required in a release will roll back to a version that is already aware of the new schema. The team is socialized to this strategy as we are requiring users of the alerting framework to also follow this strategy. This should address any backward compatibility issues that might arise by circumventing the saved objects client update function. |
| Updating saved object directly using ES client will break AAD | Medium | High | An explicit allowlist of non-AAD fields that are allowed to be partially updated has been introduced and any fields not in this allowlist will not be included in the partial update. Any updates to the rule saved object that might break AAD would show up with > 1 execution of a rule and we have a plethora of functional tests that rely on multiple executions of a rule that would flag if there were issues running due to AAD issues. |
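
The AAD mitigation above corresponds to the allowlist-then-`pick` guard added in `partially_update_rule.ts` below; roughly (a sketch: the allow-listed field names come from the diff, while the `toPartialUpdateDoc` helper and its parameter names are illustrative):

```ts
import { omit, pick } from 'lodash';

// Fields that may be partially updated; must not overlap with encrypted
// attributes or attributes included in AAD.
const RuleAttributesAllowedForPartialUpdate = [
  'executionStatus',
  'lastRun',
  'monitoring',
  'nextRun',
  'running',
];

// Sketch: strip encrypted/AAD attributes first, then keep only the
// explicitly allow-listed fields for the partial update doc.
function toPartialUpdateDoc(
  attributes: Record<string, unknown>,
  encryptedAttributes: string[],
  attributesIncludedInAAD: string[]
): Record<string, unknown> {
  const withoutProtected = omit(attributes, [...encryptedAttributes, ...attributesIncludedInAAD]);
  return pick(withoutProtected, RuleAttributesAllowedForPartialUpdate);
}
```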
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine
---
.../alerting/server/saved_objects/index.ts | 2 +-
.../partially_update_rule.test.ts | 164 +++++++++++++++++-
.../saved_objects/partially_update_rule.ts | 50 ++++++
.../alerting/server/task_runner/fixtures.ts | 89 +++++-----
.../server/task_runner/task_runner.test.ts | 51 ++----
.../server/task_runner/task_runner.ts | 19 +-
.../task_runner_alerts_client.test.ts | 18 +-
.../task_runner/task_runner_cancel.test.ts | 85 ++++-----
x-pack/plugins/alerting/tsconfig.json | 3 +-
9 files changed, 340 insertions(+), 141 deletions(-)
diff --git a/x-pack/plugins/alerting/server/saved_objects/index.ts b/x-pack/plugins/alerting/server/saved_objects/index.ts
index eb07a84950d14..a3bb0b4f0afe8 100644
--- a/x-pack/plugins/alerting/server/saved_objects/index.ts
+++ b/x-pack/plugins/alerting/server/saved_objects/index.ts
@@ -23,7 +23,7 @@ import { RawRule } from '../types';
import { getImportWarnings } from './get_import_warnings';
import { isRuleExportable } from './is_rule_exportable';
import { RuleTypeRegistry } from '../rule_type_registry';
-export { partiallyUpdateRule } from './partially_update_rule';
+export { partiallyUpdateRule, partiallyUpdateRuleWithEs } from './partially_update_rule';
import {
RULES_SETTINGS_SAVED_OBJECT_TYPE,
MAINTENANCE_WINDOW_SAVED_OBJECT_TYPE,
diff --git a/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.test.ts b/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.test.ts
index 5fcf23cbae6fb..294bc81481540 100644
--- a/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.test.ts
+++ b/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.test.ts
@@ -10,16 +10,23 @@ import {
ISavedObjectsRepository,
SavedObjectsErrorHelpers,
} from '@kbn/core/server';
-
-import { PartiallyUpdateableRuleAttributes, partiallyUpdateRule } from './partially_update_rule';
-import { savedObjectsClientMock } from '@kbn/core/server/mocks';
+import {
+ PartiallyUpdateableRuleAttributes,
+ partiallyUpdateRule,
+ partiallyUpdateRuleWithEs,
+} from './partially_update_rule';
+import { elasticsearchServiceMock, savedObjectsClientMock } from '@kbn/core/server/mocks';
import { RULE_SAVED_OBJECT_TYPE } from '.';
+import { ALERTING_CASES_SAVED_OBJECT_INDEX } from '@kbn/core-saved-objects-server';
+import { estypes } from '@elastic/elasticsearch';
+import { RuleExecutionStatuses } from '@kbn/alerting-types';
const MockSavedObjectsClientContract = savedObjectsClientMock.create();
const MockISavedObjectsRepository =
  MockSavedObjectsClientContract as unknown as jest.Mocked<ISavedObjectsRepository>;
+const esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
-describe('partially_update_rule', () => {
+describe('partiallyUpdateRule', () => {
beforeEach(() => {
jest.resetAllMocks();
});
@@ -104,6 +111,101 @@ describe('partially_update_rule', () => {
});
});
+describe('partiallyUpdateRuleWithEs', () => {
+ beforeEach(() => {
+ jest.resetAllMocks();
+ jest.clearAllMocks();
+ });
+
+ test('should work with no options', async () => {
+ esClient.update.mockResolvedValueOnce(MockEsUpdateResponse(MockRuleId));
+
+ await partiallyUpdateRuleWithEs(esClient, MockRuleId, DefaultAttributesForEsUpdate);
+ expect(esClient.update).toHaveBeenCalledTimes(1);
+ expect(esClient.update).toHaveBeenCalledWith({
+ id: `alert:${MockRuleId}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: DefaultAttributesForEsUpdate,
+ },
+ });
+ });
+
+ test('should strip unallowed attributes ', async () => {
+ const attributes =
+ AttributesForEsUpdateWithUnallowedFields as unknown as PartiallyUpdateableRuleAttributes;
+ esClient.update.mockResolvedValueOnce(MockEsUpdateResponse(MockRuleId));
+
+ await partiallyUpdateRuleWithEs(esClient, MockRuleId, attributes);
+ expect(esClient.update).toHaveBeenCalledWith({
+ id: `alert:${MockRuleId}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: DefaultAttributesForEsUpdate,
+ },
+ });
+ });
+
+ test('should handle ES errors', async () => {
+ esClient.update.mockRejectedValueOnce(new Error('wops'));
+
+ await expect(
+ partiallyUpdateRuleWithEs(esClient, MockRuleId, DefaultAttributes)
+ ).rejects.toThrowError('wops');
+ });
+
+ test('should handle the version option', async () => {
+ esClient.update.mockResolvedValueOnce(MockEsUpdateResponse(MockRuleId));
+
+ await partiallyUpdateRuleWithEs(esClient, MockRuleId, DefaultAttributesForEsUpdate, {
+ version: 'WzQsMV0=',
+ });
+ expect(esClient.update).toHaveBeenCalledWith({
+ id: `alert:${MockRuleId}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ if_primary_term: 1,
+ if_seq_no: 4,
+ doc: {
+ alert: DefaultAttributesForEsUpdate,
+ },
+ });
+ });
+
+ test('should handle the ignore404 option', async () => {
+ esClient.update.mockResolvedValueOnce(MockEsUpdateResponse(MockRuleId));
+
+ await partiallyUpdateRuleWithEs(esClient, MockRuleId, DefaultAttributesForEsUpdate, {
+ ignore404: true,
+ });
+ expect(esClient.update).toHaveBeenCalledWith(
+ {
+ id: `alert:${MockRuleId}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: DefaultAttributesForEsUpdate,
+ },
+ },
+ { ignore: [404] }
+ );
+ });
+
+ test('should handle the refresh option', async () => {
+ esClient.update.mockResolvedValueOnce(MockEsUpdateResponse(MockRuleId));
+
+ await partiallyUpdateRuleWithEs(esClient, MockRuleId, DefaultAttributesForEsUpdate, {
+ refresh: 'wait_for',
+ });
+ expect(esClient.update).toHaveBeenCalledWith({
+ id: `alert:${MockRuleId}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: DefaultAttributesForEsUpdate,
+ },
+ refresh: 'wait_for',
+ });
+ });
+});
+
function getMockSavedObjectClients(): Record<
string,
  jest.Mocked<SavedObjectsClient | ISavedObjectsRepository>
@@ -126,6 +228,50 @@ const DefaultAttributes = {
const ExtraneousAttributes = { ...DefaultAttributes, foo: 'bar' };
+const DefaultAttributesForEsUpdate = {
+ running: false,
+ executionStatus: {
+ status: 'active' as RuleExecutionStatuses,
+ lastExecutionDate: '2023-01-01T08:44:40.000Z',
+ lastDuration: 12,
+ error: null,
+ warning: null,
+ },
+ monitoring: {
+ run: {
+ calculated_metrics: {
+ success_ratio: 20,
+ },
+ history: [
+ {
+ success: true,
+ timestamp: 1640991880000,
+ duration: 12,
+ outcome: 'success',
+ },
+ ],
+ last_run: {
+ timestamp: '2023-01-01T08:44:40.000Z',
+ metrics: {
+ duration: 12,
+ gap_duration_s: null,
+ total_alerts_created: null,
+ total_alerts_detected: null,
+ total_indexing_duration_ms: null,
+ total_search_duration_ms: null,
+ },
+ },
+ },
+ },
+};
+
+const AttributesForEsUpdateWithUnallowedFields = {
+ ...DefaultAttributesForEsUpdate,
+ alertTypeId: 'foo',
+ consumer: 'consumer',
+ randomField: 'bar',
+};
+
const MockRuleId = 'rule-id';
const MockUpdateValue = {
@@ -137,3 +283,13 @@ const MockUpdateValue = {
},
references: [],
};
+
+const MockEsUpdateResponse = (id: string) => ({
+ _index: '.kibana_alerting_cases_9.0.0_001',
+ _id: `alert:${id}`,
+ _version: 3,
+ result: 'updated' as estypes.Result,
+ _shards: { total: 1, successful: 1, failed: 0 },
+ _seq_no: 5,
+ _primary_term: 1,
+});
diff --git a/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.ts b/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.ts
index 2665845a1110f..f9b4da5ed767b 100644
--- a/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.ts
+++ b/x-pack/plugins/alerting/server/saved_objects/partially_update_rule.ts
@@ -7,10 +7,13 @@
import { omit, pick } from 'lodash';
import {
+ ElasticsearchClient,
SavedObjectsClient,
SavedObjectsErrorHelpers,
SavedObjectsUpdateOptions,
} from '@kbn/core/server';
+import { decodeRequestVersion } from '@kbn/core-saved-objects-base-server-internal';
+import { ALERTING_CASES_SAVED_OBJECT_INDEX } from '@kbn/core-saved-objects-server';
import { RawRule } from '../types';
import {
@@ -67,3 +70,50 @@ export async function partiallyUpdateRule(
throw err;
}
}
+
+// Explicit list of attributes that we allow to be partially updated
+// There should be no overlap between this list and RuleAttributesIncludedInAAD or RuleAttributesToEncrypt
+const RuleAttributesAllowedForPartialUpdate = [
+ 'executionStatus',
+ 'lastRun',
+ 'monitoring',
+ 'nextRun',
+ 'running',
+];
+
+// direct, partial update to a rule saved object via ElasticsearchClient
+
+// we do this direct partial update to avoid the overhead of the SavedObjectsClient for
+// only these allow-listed fields which don't impact encryption. in addition, because these
+// fields are only updated by the system user at the end of a rule run, they should not
+// need to be included in any (user-centric) audit logs.
+export async function partiallyUpdateRuleWithEs(
+ esClient: ElasticsearchClient,
+ id: string,
+ attributes: PartiallyUpdateableRuleAttributes,
+ options: PartiallyUpdateRuleSavedObjectOptions = {}
+): Promise<void> {
+ // ensure we only have the valid attributes that are not encrypted and are excluded from AAD
+ const attributeUpdates = omit(attributes, [
+ ...RuleAttributesToEncrypt,
+ ...RuleAttributesIncludedInAAD,
+ ]);
+ // ensure we only have attributes that we explicitly allow to be updated
+ const attributesAllowedForUpdate = pick(attributeUpdates, RuleAttributesAllowedForPartialUpdate);
+
+ const updateParams = {
+ id: `alert:${id}`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ ...(options.version ? decodeRequestVersion(options.version) : {}),
+ doc: {
+ alert: attributesAllowedForUpdate,
+ },
+ ...(options.refresh ? { refresh: options.refresh } : {}),
+ };
+
+ if (options.ignore404) {
+ await esClient.update(updateParams, { ignore: [404] });
+ } else {
+ await esClient.update(updateParams);
+ }
+}
diff --git a/x-pack/plugins/alerting/server/task_runner/fixtures.ts b/x-pack/plugins/alerting/server/task_runner/fixtures.ts
index ae8eccfcb1f86..5174aa9b965ec 100644
--- a/x-pack/plugins/alerting/server/task_runner/fixtures.ts
+++ b/x-pack/plugins/alerting/server/task_runner/fixtures.ts
@@ -7,6 +7,7 @@
import { TaskStatus } from '@kbn/task-manager-plugin/server';
import { SavedObject } from '@kbn/core/server';
+import { ALERTING_CASES_SAVED_OBJECT_INDEX } from '@kbn/core-saved-objects-server';
import {
Rule,
RuleTypeParams,
@@ -64,7 +65,7 @@ const defaultHistory = [
},
];
-export const generateSavedObjectParams = ({
+export const generateRuleUpdateParams = ({
error = null,
warning = null,
status = 'ok',
@@ -83,53 +84,59 @@ export const generateSavedObjectParams = ({
history?: RuleMonitoring['run']['history'];
  alertsCount?: Record<string, number>;
}) => [
- RULE_SAVED_OBJECT_TYPE,
- '1',
{
- monitoring: {
- run: {
- calculated_metrics: {
- success_ratio: successRatio,
+ id: `alert:1`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: {
+ monitoring: {
+ run: {
+ calculated_metrics: {
+ success_ratio: successRatio,
+ },
+ history,
+ last_run: {
+ timestamp: '1970-01-01T00:00:00.000Z',
+ metrics: {
+ duration: 0,
+ gap_duration_s: null,
+ total_alerts_created: null,
+ total_alerts_detected: null,
+ total_indexing_duration_ms: null,
+ total_search_duration_ms: null,
+ },
+ },
+ },
},
- history,
- last_run: {
- timestamp: '1970-01-01T00:00:00.000Z',
- metrics: {
- duration: 0,
- gap_duration_s: null,
- total_alerts_created: null,
- total_alerts_detected: null,
- total_indexing_duration_ms: null,
- total_search_duration_ms: null,
+ executionStatus: {
+ error,
+ lastDuration: 0,
+ lastExecutionDate: '1970-01-01T00:00:00.000Z',
+ status,
+ warning,
+ },
+ lastRun: {
+ outcome,
+ outcomeOrder: RuleLastRunOutcomeOrderMap[outcome],
+ outcomeMsg:
+ (error?.message && [error?.message]) ||
+ (warning?.message && [warning?.message]) ||
+ null,
+ warning: error?.reason || warning?.reason || null,
+ alertsCount: {
+ active: 0,
+ ignored: 0,
+ new: 0,
+ recovered: 0,
+ ...(alertsCount || {}),
},
},
+ nextRun,
+ running: false,
},
},
- executionStatus: {
- error,
- lastDuration: 0,
- lastExecutionDate: '1970-01-01T00:00:00.000Z',
- status,
- warning,
- },
- lastRun: {
- outcome,
- outcomeOrder: RuleLastRunOutcomeOrderMap[outcome],
- outcomeMsg:
- (error?.message && [error?.message]) || (warning?.message && [warning?.message]) || null,
- warning: error?.reason || warning?.reason || null,
- alertsCount: {
- active: 0,
- ignored: 0,
- new: 0,
- recovered: 0,
- ...(alertsCount || {}),
- },
- },
- nextRun,
- running: false,
},
- { refresh: false, namespace: undefined },
+ { ignore: [404] },
];
export const GENERIC_ERROR_MESSAGE = 'GENERIC ERROR MESSAGE';
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
index 438ffb3685e2a..e06c260109b76 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
@@ -59,7 +59,7 @@ import {
generateRunnerResult,
RULE_ACTIONS,
generateEnqueueFunctionInput,
- generateSavedObjectParams,
+ generateRuleUpdateParams,
mockTaskInstance,
GENERIC_ERROR_MESSAGE,
generateAlertInstance,
@@ -341,8 +341,8 @@ describe('Task Runner', () => {
testAlertingEventLogCalls({ status: 'ok' });
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(taskRunnerFactoryInitializerParams.executionContext.withContext).toBeCalledTimes(1);
@@ -2676,8 +2676,8 @@ describe('Task Runner', () => {
status: 'ok',
});
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(mockUsageCounter.incrementCounter).not.toHaveBeenCalled();
});
@@ -2789,10 +2789,8 @@ describe('Task Runner', () => {
});
await taskRunner.run();
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({
- nextRun: '1970-01-01T00:00:10.000Z',
- })
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({ nextRun: '1970-01-01T00:00:10.000Z' })
);
});
@@ -2825,21 +2823,14 @@ describe('Task Runner', () => {
);
await taskRunner.run();
ruleType.executor.mockClear();
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({
- error: {
- message: GENERIC_ERROR_MESSAGE,
- reason: 'execute',
- },
+
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({
+ error: { message: GENERIC_ERROR_MESSAGE, reason: 'execute' },
outcome: 'failed',
status: 'error',
successRatio: 0,
- history: [
- {
- success: false,
- timestamp: 0,
- },
- ],
+ history: [{ success: false, timestamp: 0 }],
})
);
});
@@ -2947,15 +2938,12 @@ describe('Task Runner', () => {
expect(actionsClient.bulkEnqueueExecution).toHaveBeenCalledTimes(1);
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({
status: 'warning',
outcome: 'warning',
warning,
- alertsCount: {
- active: 1,
- new: 1,
- },
+ alertsCount: { active: 1, new: 1 },
})
);
@@ -3117,15 +3105,12 @@ describe('Task Runner', () => {
expect(actionsClient.bulkEnqueueExecution).toHaveBeenCalledTimes(1);
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({
status: 'warning',
outcome: 'warning',
warning,
- alertsCount: {
- active: 2,
- new: 2,
- },
+ alertsCount: { active: 2, new: 2 },
})
);
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner.ts b/x-pack/plugins/alerting/server/task_runner/task_runner.ts
index e01dd73df7e58..b5a1854581bf3 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner.ts
@@ -42,7 +42,7 @@ import {
import { asErr, asOk, isErr, isOk, map, resolveErr, Result } from '../lib/result_type';
import { taskInstanceToAlertTaskInstance } from './alert_task_instance';
import { isAlertSavedObjectNotFoundError, isEsUnavailableError } from '../lib/is_alerting_error';
-import { partiallyUpdateRule, RULE_SAVED_OBJECT_TYPE } from '../saved_objects';
+import { partiallyUpdateRuleWithEs, RULE_SAVED_OBJECT_TYPE } from '../saved_objects';
import {
AlertInstanceContext,
AlertInstanceState,
@@ -204,7 +204,6 @@ export class TaskRunner<
private async updateRuleSavedObjectPostRun(
ruleId: string,
- namespace: string | undefined,
attributes: {
executionStatus?: RawRuleExecutionStatus;
monitoring?: RawRuleMonitoring;
@@ -212,7 +211,7 @@ export class TaskRunner<
lastRun?: RawRuleLastRun | null;
}
) {
- const client = this.internalSavedObjectsRepository;
+ const client = this.context.elasticsearch.client.asInternalUser;
try {
// Future engineer -> Here we are just checking if we need to wait for
// the update of the attribute `running` in the rule's saved object
@@ -223,13 +222,12 @@ export class TaskRunner<
// eslint-disable-next-line no-empty
} catch {}
try {
- await partiallyUpdateRule(
+ await partiallyUpdateRuleWithEs(
client,
ruleId,
{ ...attributes, running: false },
{
ignore404: true,
- namespace,
refresh: false,
}
);
@@ -548,7 +546,7 @@ export class TaskRunner<
const { executionStatus: execStatus, executionMetrics: execMetrics } =
await this.timer.runWithTimer(TaskRunnerTimerSpan.ProcessRuleRun, async () => {
const {
- params: { alertId: ruleId, spaceId },
+ params: { alertId: ruleId },
startedAt,
schedule: taskSchedule,
} = this.taskInstance;
@@ -560,8 +558,6 @@ export class TaskRunner<
nextRun = getNextRun({ startDate: startedAt, interval: taskSchedule.interval });
}
- const namespace = this.context.spaceIdToNamespace(spaceId);
-
const { executionStatus, executionMetrics, lastRun, outcome } = processRunResults({
logger: this.logger,
logPrefix: `${this.ruleType.id}:${ruleId}`,
@@ -602,7 +598,7 @@ export class TaskRunner<
)} - ${JSON.stringify(lastRun)}`
);
}
- await this.updateRuleSavedObjectPostRun(ruleId, namespace, {
+ await this.updateRuleSavedObjectPostRun(ruleId, {
executionStatus: ruleExecutionStatusToRaw(executionStatus),
nextRun,
lastRun: lastRunToRaw(lastRun),
@@ -758,11 +754,10 @@ export class TaskRunner<
// Write event log entry
const {
- params: { alertId: ruleId, spaceId, consumer },
+ params: { alertId: ruleId, consumer },
schedule: taskSchedule,
startedAt,
} = this.taskInstance;
- const namespace = this.context.spaceIdToNamespace(spaceId);
if (consumer && !this.ruleConsumer) {
this.ruleConsumer = consumer;
@@ -803,7 +798,7 @@ export class TaskRunner<
`Updating rule task for ${this.ruleType.id} rule with id ${ruleId} - execution error due to timeout`
);
const outcome = 'failed';
- await this.updateRuleSavedObjectPostRun(ruleId, namespace, {
+ await this.updateRuleSavedObjectPostRun(ruleId, {
executionStatus: ruleExecutionStatusToRaw(executionStatus),
lastRun: {
outcome,
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner_alerts_client.test.ts b/x-pack/plugins/alerting/server/task_runner/task_runner_alerts_client.test.ts
index c116230016e9b..6c7331de463ea 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner_alerts_client.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner_alerts_client.test.ts
@@ -46,7 +46,7 @@ import {
RULE_NAME,
generateRunnerResult,
RULE_ACTIONS,
- generateSavedObjectParams,
+ generateRuleUpdateParams,
mockTaskInstance,
DATE_1970,
DATE_1970_5_MIN,
@@ -376,8 +376,8 @@ describe('Task Runner', () => {
{ tags: ['1', 'test'] }
);
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(taskRunnerFactoryInitializerParams.executionContext.withContext).toBeCalledTimes(1);
@@ -510,8 +510,8 @@ describe('Task Runner', () => {
'ruleRunMetrics for test:1: {"numSearches":3,"totalSearchDurationMs":23423,"esSearchDurationMs":33,"numberOfTriggeredActions":0,"numberOfGeneratedActions":0,"numberOfActiveAlerts":0,"numberOfRecoveredAlerts":0,"numberOfNewAlerts":0,"numberOfDelayedAlerts":0,"hasReachedAlertLimit":false,"hasReachedQueuedActionsLimit":false,"triggeredActionsStatus":"complete"}',
{ tags: ['1', 'test'] }
);
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(taskRunnerFactoryInitializerParams.executionContext.withContext).toBeCalledTimes(1);
expect(
@@ -708,8 +708,8 @@ describe('Task Runner', () => {
tags: ['1', 'test'],
});
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(taskRunnerFactoryInitializerParams.executionContext.withContext).toBeCalledTimes(1);
@@ -799,8 +799,8 @@ describe('Task Runner', () => {
tags: ['1', 'test'],
});
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- ...generateSavedObjectParams({})
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
+ ...generateRuleUpdateParams({})
);
expect(taskRunnerFactoryInitializerParams.executionContext.withContext).toBeCalledTimes(1);
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner_cancel.test.ts b/x-pack/plugins/alerting/server/task_runner/task_runner_cancel.test.ts
index 3a6a9547fb902..e5572707ae6fd 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner_cancel.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner_cancel.test.ts
@@ -63,6 +63,7 @@ import { TaskRunnerContext } from './types';
import { backfillClientMock } from '../backfill_client/backfill_client.mock';
import { UntypedNormalizedRuleType } from '../rule_type_registry';
import { rulesSettingsServiceMock } from '../rules_settings/rules_settings_service.mock';
+import { ALERTING_CASES_SAVED_OBJECT_INDEX } from '@kbn/core-saved-objects-server';
import { maintenanceWindowsServiceMock } from './maintenance_windows/maintenance_windows_service.mock';
jest.mock('uuid', () => ({
@@ -225,53 +226,57 @@ describe('Task Runner Cancel', () => {
testAlertingEventLogCalls({ status: 'ok' });
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledTimes(1);
- expect(internalSavedObjectsRepository.update).toHaveBeenCalledWith(
- RULE_SAVED_OBJECT_TYPE,
- '1',
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledTimes(1);
+ expect(elasticsearchService.client.asInternalUser.update).toHaveBeenCalledWith(
{
- executionStatus: {
- error: {
- message: `test:1: execution cancelled due to timeout - exceeded rule type timeout of 5m`,
- reason: 'timeout',
- },
- lastDuration: 0,
- lastExecutionDate: '1970-01-01T00:00:00.000Z',
- status: 'error',
- warning: null,
- },
- lastRun: {
- alertsCount: {},
- outcome: 'failed',
- outcomeMsg: [
- 'test:1: execution cancelled due to timeout - exceeded rule type timeout of 5m',
- ],
- outcomeOrder: 20,
- warning: 'timeout',
- },
- monitoring: {
- run: {
- calculated_metrics: {
- success_ratio: 0,
+ id: `alert:1`,
+ index: ALERTING_CASES_SAVED_OBJECT_INDEX,
+ doc: {
+ alert: {
+ executionStatus: {
+ error: {
+ message: `test:1: execution cancelled due to timeout - exceeded rule type timeout of 5m`,
+ reason: 'timeout',
+ },
+ lastDuration: 0,
+ lastExecutionDate: '1970-01-01T00:00:00.000Z',
+ status: 'error',
+ warning: null,
+ },
+ lastRun: {
+ alertsCount: {},
+ outcome: 'failed',
+ outcomeMsg: [
+ 'test:1: execution cancelled due to timeout - exceeded rule type timeout of 5m',
+ ],
+ outcomeOrder: 20,
+ warning: 'timeout',
},
- history: [],
- last_run: {
- metrics: {
- duration: 0,
- gap_duration_s: null,
- total_alerts_created: null,
- total_alerts_detected: null,
- total_indexing_duration_ms: null,
- total_search_duration_ms: null,
+ monitoring: {
+ run: {
+ calculated_metrics: {
+ success_ratio: 0,
+ },
+ history: [],
+ last_run: {
+ metrics: {
+ duration: 0,
+ gap_duration_s: null,
+ total_alerts_created: null,
+ total_alerts_detected: null,
+ total_indexing_duration_ms: null,
+ total_search_duration_ms: null,
+ },
+ timestamp: '1970-01-01T00:00:00.000Z',
+ },
},
- timestamp: '1970-01-01T00:00:00.000Z',
},
+ nextRun: '1970-01-01T00:00:10.000Z',
+ running: false,
},
},
- nextRun: '1970-01-01T00:00:10.000Z',
- running: false,
},
- { refresh: false, namespace: undefined }
+ { ignore: [404] }
);
expect(mockUsageCounter.incrementCounter).toHaveBeenCalledTimes(1);
expect(mockUsageCounter.incrementCounter).toHaveBeenCalledWith({
diff --git a/x-pack/plugins/alerting/tsconfig.json b/x-pack/plugins/alerting/tsconfig.json
index c09816222b010..c0951663a8489 100644
--- a/x-pack/plugins/alerting/tsconfig.json
+++ b/x-pack/plugins/alerting/tsconfig.json
@@ -72,7 +72,8 @@
"@kbn/alerting-state-types",
"@kbn/core-security-server",
"@kbn/core-http-server",
- "@kbn/zod"
+ "@kbn/zod",
+ "@kbn/core-saved-objects-base-server-internal"
],
"exclude": [
"target/**/*"
From 2a935dcce37bd3e3f3fce32d6257b2ec7191dac5 Mon Sep 17 00:00:00 2001
From: Nathan L Smith
Date: Mon, 30 Sep 2024 09:55:49 -0500
Subject: [PATCH 017/107] Hide progress bar on initial load when client prefers
reduced motion (#194365)
## Summary
See
https://developer.mozilla.org/en-US/docs/Web/CSS/@media/prefers-reduced-motion
![CleanShot 2024-09-29 at 21 57 36](https://github.com/user-attachments/assets/27436ec4-986b-4c91-9d9f-e49d59d76e7c)
---
.../apps/core-apps-server-internal/assets/legacy_styles.css | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/packages/core/apps/core-apps-server-internal/assets/legacy_styles.css b/packages/core/apps/core-apps-server-internal/assets/legacy_styles.css
index 55ed0fbfdf8ba..ea149e7fec132 100644
--- a/packages/core/apps/core-apps-server-internal/assets/legacy_styles.css
+++ b/packages/core/apps/core-apps-server-internal/assets/legacy_styles.css
@@ -114,3 +114,9 @@ body, html {
transform: scaleX(1) translateX(100%);
}
}
+
+@media (prefers-reduced-motion) {
+ .kbnProgress {
+ display: none;
+ }
+}
From 508141423e7d18ce87628d826628d161c3292418 Mon Sep 17 00:00:00 2001
From: Hanna Tamoudi
Date: Mon, 30 Sep 2024 17:46:05 +0200
Subject: [PATCH 018/107] [Automatic Import] add fields mapping to readme
(#193717)
---
.../build_integration.test.ts | 62 ++++--
.../integration_builder/build_integration.ts | 43 +++--
.../integration_builder/data_stream.test.ts | 12 ++
.../server/integration_builder/data_stream.ts | 37 +++-
.../server/integration_builder/fields.test.ts | 34 ++++
.../server/integration_builder/fields.ts | 20 +-
.../integration_builder/readme_files.test.ts | 182 ++++++++++++++++++
.../integration_builder/readme_files.ts | 42 ++++
.../server/templates/build_readme.md.njk | 8 +
.../server/templates/package_readme.md.njk | 43 ++---
.../server/templates/readme.njk | 31 +++
.../server/util/samples.test.ts | 159 ++++++++++++++-
.../server/util/samples.ts | 43 ++++-
13 files changed, 634 insertions(+), 82 deletions(-)
create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/readme_files.test.ts
create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/readme_files.ts
create mode 100644 x-pack/plugins/integration_assistant/server/templates/build_readme.md.njk
create mode 100644 x-pack/plugins/integration_assistant/server/templates/readme.njk
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.test.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.test.ts
index e8800af12653f..419e287e23bf7 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.test.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.test.ts
@@ -14,6 +14,7 @@ import { createAgentInput } from './agent';
import { createPipeline } from './pipeline';
import { DataStream, Docs, InputType, Pipeline, Integration } from '../../common';
import yaml from 'js-yaml';
+import { createReadme } from './readme_files';
const mockedDataPath = 'path';
const mockedId = 123;
@@ -23,6 +24,10 @@ jest.mock('./data_stream');
jest.mock('./fields');
jest.mock('./agent');
jest.mock('./pipeline');
+jest.mock('./readme_files');
+
+(createFieldMapping as jest.Mock).mockReturnValue([]);
+(createDataStream as jest.Mock).mockReturnValue([]);
(generateUniqueId as jest.Mock).mockReturnValue(mockedId);
@@ -106,22 +111,11 @@ describe('buildPackage', () => {
// _dev files
expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/_dev/build`);
- expect(createSync).toHaveBeenCalledWith(
- `${integrationPath}/_dev/build/docs/README.md`,
- expect.any(String)
- );
expect(createSync).toHaveBeenCalledWith(
`${integrationPath}/_dev/build/build.yml`,
expect.any(String)
);
- // Docs files
- expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/docs/`);
- expect(createSync).toHaveBeenCalledWith(
- `${integrationPath}/docs/README.md`,
- expect.any(String)
- );
-
// Changelog file
expect(createSync).toHaveBeenCalledWith(`${integrationPath}/changelog.yml`, expect.any(String));
@@ -188,6 +182,52 @@ describe('buildPackage', () => {
secondDataStreamDocs
);
});
+
+ it('Should call createReadme once with sorted fields', async () => {
+ jest.clearAllMocks();
+
+ const firstDSFieldsMapping = [{ name: 'name a', description: 'description 1', type: 'type 1' }];
+
+ const firstDataStreamFields = [
+ { name: 'name b', description: 'description 1', type: 'type 1' },
+ ];
+
+ const secondDSFieldsMapping = [
+ { name: 'name c', description: 'description 2', type: 'type 2' },
+ { name: 'name e', description: 'description 3', type: 'type 3' },
+ ];
+
+ const secondDataStreamFields = [
+ { name: 'name d', description: 'description 2', type: 'type 2' },
+ ];
+
+ (createFieldMapping as jest.Mock).mockReturnValueOnce(firstDSFieldsMapping);
+ (createDataStream as jest.Mock).mockReturnValueOnce(firstDataStreamFields);
+
+ (createFieldMapping as jest.Mock).mockReturnValueOnce(secondDSFieldsMapping);
+ (createDataStream as jest.Mock).mockReturnValueOnce(secondDataStreamFields);
+
+ await buildPackage(testIntegration);
+
+ expect(createReadme).toHaveBeenCalledWith(integrationPath, testIntegration.name, [
+ {
+ datastream: firstDatastreamName,
+ fields: [
+ { name: 'name a', description: 'description 1', type: 'type 1' },
+
+ { name: 'name b', description: 'description 1', type: 'type 1' },
+ ],
+ },
+ {
+ datastream: secondDatastreamName,
+ fields: [
+ { name: 'name c', description: 'description 2', type: 'type 2' },
+ { name: 'name d', description: 'description 2', type: 'type 2' },
+ { name: 'name e', description: 'description 3', type: 'type 3' },
+ ],
+ },
+ ]);
+ });
});
describe('renderPackageManifestYAML', () => {
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts
index b9bc1b55268d7..8743ada38bdb6 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts
@@ -16,6 +16,8 @@ import { createAgentInput } from './agent';
import { createDataStream } from './data_stream';
import { createFieldMapping } from './fields';
import { createPipeline } from './pipeline';
+import { createReadme } from './readme_files';
+import { Field, flattenObjectsList } from '../util/samples';
const initialVersion = '1.0.0';
@@ -37,17 +39,27 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
const packageDir = createDirectories(workingDir, integration, packageDirectoryName);
const dataStreamsDir = joinPath(packageDir, 'data_stream');
-
- for (const dataStream of integration.dataStreams) {
+ const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
const dataStreamName = dataStream.name;
const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);
- createDataStream(integration.name, specificDataStreamDir, dataStream);
+ const dataStreamFields = createDataStream(integration.name, specificDataStreamDir, dataStream);
createAgentInput(specificDataStreamDir, dataStream.inputTypes);
createPipeline(specificDataStreamDir, dataStream.pipeline);
- createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs);
- }
+ const fields = createFieldMapping(
+ integration.name,
+ dataStreamName,
+ specificDataStreamDir,
+ dataStream.docs
+ );
+
+ return {
+ datastream: dataStreamName,
+ fields: mergeAndSortFields(fields, dataStreamFields),
+ };
+ });
+ createReadme(packageDir, integration.name, fieldsPerDatastream);
const zipBuffer = await createZipArchive(workingDir, packageDirectoryName);
removeDirSync(workingDir);
@@ -67,7 +79,6 @@ function createDirectories(
}
function createPackage(packageDir: string, integration: Integration): void {
- createReadme(packageDir, integration);
createChangelog(packageDir);
createBuildFile(packageDir);
createPackageManifest(packageDir, integration);
@@ -102,20 +113,6 @@ function createChangelog(packageDir: string): void {
createSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate);
}
-function createReadme(packageDir: string, integration: Integration) {
- const readmeDirPath = joinPath(packageDir, '_dev/build/docs/');
- const mainReadmeDirPath = joinPath(packageDir, 'docs/');
- ensureDirSync(mainReadmeDirPath);
- ensureDirSync(readmeDirPath);
- const readmeTemplate = nunjucks.render('package_readme.md.njk', {
- package_name: integration.name,
- data_streams: integration.dataStreams,
- });
-
- createSync(joinPath(readmeDirPath, 'README.md'), readmeTemplate);
- createSync(joinPath(mainReadmeDirPath, 'README.md'), readmeTemplate);
-}
-
async function createZipArchive(workingDir: string, packageDirectoryName: string): Promise<Buffer> {
const tmpPackageDir = joinPath(workingDir, packageDirectoryName);
const zip = new AdmZip();
@@ -124,6 +121,12 @@ async function createZipArchive(workingDir: string, packageDirectoryName: string
return buffer;
}
+function mergeAndSortFields(fields: Field[], dataStreamFields: Field[]): Field[] {
+ const mergedFields = [...fields, ...dataStreamFields];
+
+ return flattenObjectsList(mergedFields);
+}
+
/* eslint-disable @typescript-eslint/naming-convention */
/**
* Creates a package manifest dictionary.
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.test.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.test.ts
index 550c6118636cc..0a269fa07a1c8 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.test.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.test.ts
@@ -81,4 +81,16 @@ describe('createDataStream', () => {
expect(render).toHaveBeenCalledWith(`filestream_manifest.yml.njk`, expect.anything());
expect(render).toHaveBeenCalledWith(`azure_eventhub_manifest.yml.njk`, expect.anything());
});
+
+ it('Should return the list of fields', async () => {
+ const fields = createDataStream(packageName, dataStreamPath, firstDataStream);
+
+ expect(Array.isArray(fields)).toBe(true);
+ fields.forEach((field) => {
+ expect(field).toMatchObject({
+ name: expect.any(String),
+ type: expect.any(String),
+ });
+ });
+ });
});
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts
index 02b3f12f53d68..d66ee1958b3ea 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts
@@ -7,14 +7,16 @@
import nunjucks from 'nunjucks';
import { join as joinPath } from 'path';
+import { load } from 'js-yaml';
import type { DataStream } from '../../common';
-import { copySync, createSync, ensureDirSync, listDirSync } from '../util';
+import { copySync, createSync, ensureDirSync, listDirSync, readSync } from '../util';
+import { Field } from '../util/samples';
export function createDataStream(
packageName: string,
specificDataStreamDir: string,
dataStream: DataStream
-): void {
+): Field[] {
const dataStreamName = dataStream.name;
const pipelineDir = joinPath(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline');
const title = dataStream.title;
@@ -23,7 +25,7 @@ export function createDataStream(
const useMultilineNDJSON = samplesFormat.name === 'ndjson' && samplesFormat.multiline === true;
ensureDirSync(specificDataStreamDir);
- createDataStreamFolders(specificDataStreamDir, pipelineDir);
+ const fields = createDataStreamFolders(specificDataStreamDir, pipelineDir);
createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName);
const dataStreams: string[] = [];
@@ -51,19 +53,34 @@ export function createDataStream(
});
createSync(joinPath(specificDataStreamDir, 'manifest.yml'), finalManifest);
+
+ return fields;
+}
+
+function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): Field[] {
+ ensureDirSync(pipelineDir);
+ return copyFilesFromTemplateDir(specificDataStreamDir);
}
-function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void {
+function copyFilesFromTemplateDir(specificDataStreamDir: string): Field[] {
const dataStreamTemplatesDir = joinPath(__dirname, '../templates/data_stream');
const items = listDirSync(dataStreamTemplatesDir);
+ return items.flatMap((item) => {
+ const sourcePath = joinPath(dataStreamTemplatesDir, item);
+ const destinationPath = joinPath(specificDataStreamDir, item);
+ copySync(sourcePath, destinationPath);
+ const files = listDirSync(sourcePath);
- for (const item of items) {
- const s = joinPath(dataStreamTemplatesDir, item);
- const d = joinPath(specificDataStreamDir, item);
- copySync(s, d);
- }
+ return loadFieldsFromFiles(sourcePath, files);
+ });
+}
- ensureDirSync(pipelineDir);
+function loadFieldsFromFiles(sourcePath: string, files: string[]): Field[] {
+ return files.flatMap((file) => {
+ const filePath = joinPath(sourcePath, file);
+ const content = readSync(filePath);
+ return load(content) as Field[];
+ });
}
function createPipelineTests(
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.test.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.test.ts
index a657f699cfff9..bb76577d64fd3 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.test.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.test.ts
@@ -65,4 +65,38 @@ describe('createFieldMapping', () => {
);
expect(createSync).toHaveBeenCalledWith(`${dataStreamPath}/fields/fields.yml`, expectedFields);
});
+
+ it('Should return all fields flattened', async () => {
+ const docs: Docs = [
+ {
+ key: 'foo',
+ anotherKey: 'bar',
+ },
+ ];
+
+ const baseFields = `- name: data_stream.type
+ type: constant_keyword
+ description: Data stream type.
+- name: data_stream.dataset
+ type: constant_keyword
+- name: "@timestamp"
+ type: date
+ description: Event timestamp.
+`;
+ (render as jest.Mock).mockReturnValue(baseFields);
+
+ const fieldsResult = createFieldMapping(packageName, dataStreamName, dataStreamPath, docs);
+
+ expect(fieldsResult).toEqual([
+ {
+ name: 'data_stream.type',
+ type: 'constant_keyword',
+ description: 'Data stream type.',
+ },
+ { name: 'data_stream.dataset', type: 'constant_keyword' },
+ { name: '@timestamp', type: 'date', description: 'Event timestamp.' },
+ { name: 'key', type: 'keyword' },
+ { name: 'anotherKey', type: 'keyword' },
+ ]);
+ });
});
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts
index 79977ef2f3927..476bc0e74d697 100644
--- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts
@@ -6,7 +6,8 @@
*/
import nunjucks from 'nunjucks';
-
+import { load } from 'js-yaml';
+import { Field } from '../util/samples';
import { createSync, generateFields, mergeSamples } from '../util';
export function createFieldMapping(
@@ -14,28 +15,33 @@ export function createFieldMapping(
dataStreamName: string,
specificDataStreamDir: string,
docs: object[]
-): void {
+): Field[] {
const dataStreamFieldsDir = `${specificDataStreamDir}/fields`;
- createBaseFields(dataStreamFieldsDir, packageName, dataStreamName);
- createCustomFields(dataStreamFieldsDir, docs);
+ const baseFields = createBaseFields(dataStreamFieldsDir, packageName, dataStreamName);
+ const customFields = createCustomFields(dataStreamFieldsDir, docs);
+
+ return [...baseFields, ...customFields];
}
function createBaseFields(
dataStreamFieldsDir: string,
packageName: string,
dataStreamName: string
-): void {
+): Field[] {
const datasetName = `${packageName}.${dataStreamName}`;
const baseFields = nunjucks.render('base_fields.yml.njk', {
module: packageName,
dataset: datasetName,
});
-
createSync(`${dataStreamFieldsDir}/base-fields.yml`, baseFields);
+
+ return load(baseFields) as Field[];
}
-function createCustomFields(dataStreamFieldsDir: string, pipelineResults: object[]): void {
+function createCustomFields(dataStreamFieldsDir: string, pipelineResults: object[]): Field[] {
const mergedResults = mergeSamples(pipelineResults);
const fieldKeys = generateFields(mergedResults);
createSync(`${dataStreamFieldsDir}/fields.yml`, fieldKeys);
+
+ return load(fieldKeys) as Field[];
}
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.test.ts b/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.test.ts
new file mode 100644
index 0000000000000..ae9080fff8a74
--- /dev/null
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.test.ts
@@ -0,0 +1,182 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { testIntegration } from '../../__jest__/fixtures/build_integration';
+import { ensureDirSync, createSync } from '../util';
+import { configure } from 'nunjucks';
+import { join as joinPath } from 'path';
+import { createReadme } from './readme_files';
+
+jest.mock('../util', () => ({
+ ...jest.requireActual('../util'),
+ createSync: jest.fn(),
+ ensureDirSync: jest.fn(),
+}));
+
+describe('createReadme', () => {
+ const integrationPath = 'path';
+
+ const templateDir = joinPath(__dirname, '../templates');
+ configure([templateDir], {
+ autoescape: false,
+ });
+
+ beforeEach(async () => {
+ jest.clearAllMocks();
+ });
+
+ it('Should create expected files', async () => {
+ const fields = [
+ {
+ datastream: 'data_stream_1',
+ fields: [
+ {
+ name: 'data_stream.type',
+ type: 'constant_keyword',
+ description: 'Data stream type.',
+ },
+ {
+ name: 'data_stream.dataset',
+ type: 'constant_keyword',
+ description: 'Data stream dataset name.',
+ },
+ {
+ name: 'event.dataset',
+ type: 'constant_keyword',
+ description: 'Event dataset',
+ value: 'package.datastream',
+ },
+ { name: '@timestamp', type: 'date', description: 'Event timestamp.' },
+ ],
+ },
+ {
+ datastream: 'data_stream_2',
+ fields: [{ name: '@timestamp', type: 'date', description: 'Event timestamp.' }],
+ },
+ ];
+
+ createReadme(integrationPath, testIntegration.name, fields);
+
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/_dev/build/docs/README.md`,
+ expect.any(String)
+ );
+
+ // Docs files
+ expect(ensureDirSync).toHaveBeenCalledWith(`${integrationPath}/docs/`);
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/docs/README.md`,
+ expect.any(String)
+ );
+ });
+
+ it('Should render a table per datastream with fields mapping in package readme', async () => {
+ const fields = [
+ {
+ datastream: 'data_stream_1',
+ fields: [
+ {
+ name: 'data_stream.type',
+ type: 'constant_keyword',
+ description: 'Data stream type.',
+ },
+ {
+ name: 'data_stream.dataset',
+ type: 'constant_keyword',
+ },
+ {
+ name: 'event.dataset',
+ type: 'constant_keyword',
+ description: 'Event dataset',
+ value: 'package.datastream',
+ },
+ { name: '@timestamp', type: 'date', description: 'Event timestamp.' },
+ ],
+ },
+ {
+ datastream: 'data_stream_2',
+ fields: [{ name: '@timestamp', type: 'date', description: 'Event timestamp.' }],
+ },
+ ];
+
+ createReadme(integrationPath, testIntegration.name, fields);
+
+ const firstDatastreamFieldsDisplayed = `
+| Field | Description | Type |
+|---|---|---|
+| data_stream.type | Data stream type. | constant_keyword |
+| data_stream.dataset | | constant_keyword |
+| event.dataset | Event dataset | constant_keyword |
+| @timestamp | Event timestamp. | date |
+`;
+
+ const secondDatastreamFieldsDisplayed = `
+| Field | Description | Type |
+|---|---|---|
+| @timestamp | Event timestamp. | date |
+`;
+
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/docs/README.md`,
+ expect.stringContaining(firstDatastreamFieldsDisplayed)
+ );
+
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/docs/README.md`,
+ expect.stringContaining(secondDatastreamFieldsDisplayed)
+ );
+ });
+
+ it('Should not render a fields mapping table in build readme', async () => {
+ const fields = [
+ {
+ datastream: 'data_stream_1',
+ fields: [{ name: '@timestamp', type: 'date', description: 'Event timestamp.' }],
+ },
+ ];
+
+ createReadme(integrationPath, testIntegration.name, fields);
+
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/_dev/build/docs/README.md`,
+ expect.stringContaining('{{fields "data_stream_1"}}')
+ );
+ });
+
+ it('Should render a formatted table per datastream with fields mapping in package readme', async () => {
+ const fields = [
+ {
+ datastream: 'data_stream_1',
+ fields: [
+ {
+ name: 'data_stream.type',
+ type: 'constant_keyword',
+ description: 'Data stream type.\n',
+ },
+ {
+ name: 'data_stream.dataset',
+ type: 'constant_keyword',
+ },
+ ],
+ },
+ ];
+
+ createReadme(integrationPath, testIntegration.name, fields);
+
+ const firstDatastreamFieldsDisplayed = `
+| Field | Description | Type |
+|---|---|---|
+| data_stream.type | Data stream type. | constant_keyword |
+| data_stream.dataset | | constant_keyword |
+`;
+
+ expect(createSync).toHaveBeenCalledWith(
+ `${integrationPath}/docs/README.md`,
+ expect.stringContaining(firstDatastreamFieldsDisplayed)
+ );
+ });
+});
diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.ts b/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.ts
new file mode 100644
index 0000000000000..163b2b04b52f9
--- /dev/null
+++ b/x-pack/plugins/integration_assistant/server/integration_builder/readme_files.ts
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import nunjucks from 'nunjucks';
+
+import { join as joinPath } from 'path';
+import { createSync, ensureDirSync } from '../util';
+
+export function createReadme(packageDir: string, integrationName: string, fields: object[]) {
+ createPackageReadme(packageDir, integrationName, fields);
+ createBuildReadme(packageDir, integrationName, fields);
+}
+
+function createPackageReadme(packageDir: string, integrationName: string, fields: object[]) {
+ const dirPath = joinPath(packageDir, 'docs/');
+ createReadmeFile(dirPath, 'package_readme.md.njk', integrationName, fields);
+}
+
+function createBuildReadme(packageDir: string, integrationName: string, fields: object[]) {
+ const dirPath = joinPath(packageDir, '_dev/build/docs/');
+ createReadmeFile(dirPath, 'build_readme.md.njk', integrationName, fields);
+}
+
+function createReadmeFile(
+ targetDir: string,
+ templateName: string,
+ integrationName: string,
+ fields: object[]
+) {
+ ensureDirSync(targetDir);
+
+ const template = nunjucks.render(templateName, {
+ package_name: integrationName,
+ fields,
+ });
+
+ createSync(joinPath(targetDir, 'README.md'), template);
+}
diff --git a/x-pack/plugins/integration_assistant/server/templates/build_readme.md.njk b/x-pack/plugins/integration_assistant/server/templates/build_readme.md.njk
new file mode 100644
index 0000000000000..e23fa4af9efe8
--- /dev/null
+++ b/x-pack/plugins/integration_assistant/server/templates/build_readme.md.njk
@@ -0,0 +1,8 @@
+{% include "readme.njk" %}
+{% for data_stream in fields %}
+### {{ data_stream.datastream }}
+
+Insert a description of the datastream here.
+
+{% raw %}{{fields {% endraw %}"{{ data_stream.datastream }}"{% raw %}}}{% endraw %}
+{% endfor %}
\ No newline at end of file
diff --git a/x-pack/plugins/integration_assistant/server/templates/package_readme.md.njk b/x-pack/plugins/integration_assistant/server/templates/package_readme.md.njk
index 02bf606ab386a..b47e3491b5bc2 100644
--- a/x-pack/plugins/integration_assistant/server/templates/package_readme.md.njk
+++ b/x-pack/plugins/integration_assistant/server/templates/package_readme.md.njk
@@ -1,38 +1,17 @@
-# {{ package_name }} Integration
+{% include "readme.njk" %}
+{% for data_stream in fields %}
+### {{ data_stream.datastream }}
-## Overview
-
-Explain what the integration is, define the third-party product that is providing data, establish its relationship to the larger ecosystem of Elastic products, and help the reader understand how it can be used to solve a tangible problem.
-Check the [overview guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-overview) for more information.
-
-## Datastreams
-
-Provide a high-level overview of the kind of data that is collected by the integration.
-Check the [datastreams guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-datastreams) for more information.
-
-## Requirements
-
-The requirements section helps readers to confirm that the integration will work with their systems.
-Check the [requirements guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-requirements) for more information.
-
-## Setup
-
-Point the reader to the [Observability Getting started guide](https://www.elastic.co/guide/en/observability/master/observability-get-started.html) for generic, step-by-step instructions. Include any additional setup instructions beyond what’s included in the guide, which may include instructions to update the configuration of a third-party service.
-Check the [setup guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-setup) for more information.
-
-## Troubleshooting (optional)
-
-Provide information about special cases and exceptions that aren’t necessary for getting started or won’t be applicable to all users. Check the [troubleshooting guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-troubleshooting) for more information.
-
-## Reference
+Insert a description of the datastream here.
-Provide detailed information about the log or metric types we support within the integration. Check the [reference guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-reference) for more information.
+**ECS Field Reference**
-## Logs
-{% for data_stream in data_streams %}
-### {{ data_stream.name }}
+Please refer to the following [document](https://www.elastic.co/guide/en/ecs/current/ecs-field-reference.html) for detailed information on ECS fields.
-Insert a description of the datastream here.
+**Exported fields**
-{% raw %}{{fields {% endraw %}"{{ data_stream.name }}"{% raw %}}}{% endraw %}
+| Field | Description | Type |
+|---|---|---|
+{% for field in data_stream.fields %}| {{ field.name }} | {{ field.description | default('') | replace('\n', ' ') | trim }} | {{ field.type }} |
{% endfor %}
+{% endfor %}
\ No newline at end of file
diff --git a/x-pack/plugins/integration_assistant/server/templates/readme.njk b/x-pack/plugins/integration_assistant/server/templates/readme.njk
new file mode 100644
index 0000000000000..91c1bf6f1b40c
--- /dev/null
+++ b/x-pack/plugins/integration_assistant/server/templates/readme.njk
@@ -0,0 +1,31 @@
+# {{ package_name }} Integration
+
+## Overview
+
+Explain what the integration is, define the third-party product that is providing data, establish its relationship to the larger ecosystem of Elastic products, and help the reader understand how it can be used to solve a tangible problem.
+Check the [overview guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-overview) for more information.
+
+## Datastreams
+
+Provide a high-level overview of the kind of data that is collected by the integration.
+Check the [datastreams guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-datastreams) for more information.
+
+## Requirements
+
+The requirements section helps readers to confirm that the integration will work with their systems.
+Check the [requirements guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-requirements) for more information.
+
+## Setup
+
+Point the reader to the [Observability Getting started guide](https://www.elastic.co/guide/en/observability/master/observability-get-started.html) for generic, step-by-step instructions. Include any additional setup instructions beyond what’s included in the guide, which may include instructions to update the configuration of a third-party service.
+Check the [setup guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-setup) for more information.
+
+## Troubleshooting (optional)
+
+Provide information about special cases and exceptions that aren’t necessary for getting started or won’t be applicable to all users. Check the [troubleshooting guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-troubleshooting) for more information.
+
+## Reference
+
+Provide detailed information about the log or metric types we support within the integration. Check the [reference guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-reference) for more information.
+
+## Logs
\ No newline at end of file
diff --git a/x-pack/plugins/integration_assistant/server/util/samples.test.ts b/x-pack/plugins/integration_assistant/server/util/samples.test.ts
index 131135e842334..f87f9a96ca2c0 100644
--- a/x-pack/plugins/integration_assistant/server/util/samples.test.ts
+++ b/x-pack/plugins/integration_assistant/server/util/samples.test.ts
@@ -5,7 +5,164 @@
* 2.0.
*/
-import { merge } from './samples';
+import { flattenObjectsList, merge } from './samples';
+
+describe('flattenObjectsList', () => {
+ it('Should return a list with flattened key/value entries', async () => {
+ const result = flattenObjectsList([
+ {
+ name: 'a',
+ type: 'group',
+ fields: [
+ {
+ name: 'b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ {
+ name: 'c',
+ type: 'group',
+ fields: [
+ {
+ name: 'd',
+ type: 'keyword',
+ },
+ {
+ name: 'e',
+ description: 'Some description for e',
+ type: 'keyword',
+ },
+ ],
+ },
+ ],
+ },
+ ]);
+
+ expect(result).toEqual([
+ {
+ name: 'a.b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ {
+ name: 'a.c.d',
+ type: 'keyword',
+ description: undefined,
+ },
+ {
+ name: 'a.c.e',
+ type: 'keyword',
+ description: 'Some description for e',
+ },
+ ]);
+ });
+
+ it('Should return an empty list if passed an empty list', async () => {
+ const result = flattenObjectsList([]);
+
+ expect(result).toEqual([]);
+ });
+
+ it('Should return a list with key/value entries', async () => {
+ const result = flattenObjectsList([
+ {
+ name: 'a',
+ type: 'keyword',
+ description: 'Some description for a',
+ },
+ ]);
+
+ expect(result).toEqual([
+ {
+ name: 'a',
+ type: 'keyword',
+ description: 'Some description for a',
+ },
+ ]);
+ });
+
+ it('Should return a sorted list of key/value entries', async () => {
+ const result = flattenObjectsList([
+ {
+ name: 'c',
+ type: 'group',
+ fields: [
+ {
+ name: 'b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ {
+ name: 'a',
+ type: 'group',
+ fields: [
+ {
+ name: 'e',
+ type: 'keyword',
+ description: 'Some description for e',
+ },
+ {
+ name: 'd',
+ type: 'keyword',
+ },
+ ],
+ },
+ ],
+ },
+ ]);
+
+ expect(result).toEqual([
+ {
+ name: 'c.a.d',
+ type: 'keyword',
+ description: undefined,
+ },
+ {
+ name: 'c.a.e',
+ type: 'keyword',
+ description: 'Some description for e',
+ },
+ {
+ name: 'c.b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ ]);
+ });
+
+ it('Should not error if group type is not an array', async () => {
+ const result = flattenObjectsList([
+ {
+ name: 'a',
+ type: 'group',
+ fields: [
+ {
+ name: 'b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ {
+ name: 'c',
+ type: 'group',
+ },
+ ],
+ },
+ ]);
+
+ expect(result).toEqual([
+ {
+ name: 'a.b',
+ type: 'keyword',
+ description: 'Some description for b',
+ },
+ {
+ name: 'a.c',
+ type: 'group',
+ description: undefined,
+ },
+ ]);
+ });
+});
describe('merge', () => {
it('Should return source if target is empty', async () => {
diff --git a/x-pack/plugins/integration_assistant/server/util/samples.ts b/x-pack/plugins/integration_assistant/server/util/samples.ts
index a29813c1643f8..6993e87a774e9 100644
--- a/x-pack/plugins/integration_assistant/server/util/samples.ts
+++ b/x-pack/plugins/integration_assistant/server/util/samples.ts
@@ -18,9 +18,10 @@ interface NewObj {
};
}
-interface Field {
+export interface Field {
name: string;
type: string;
+ description?: string;
fields?: Field[];
}
@@ -233,3 +234,43 @@ export function mergeSamples(objects: any[]): string {
return JSON.stringify(result, null, 2);
}
+
+export function flattenObjectsList(
+ obj: Field[]
+): Array<{ name: string; type: string; description?: string }> {
+ const result: Array<{ name: string; type: string; description?: string }> = [];
+ flattenObject(obj, '', '.', result);
+
+ return sortArrayOfObjects(result);
+}
+
+function flattenObject(
+ obj: Field[],
+ parentKey: string = '',
+ separator: string = '.',
+ result: Array<{ name: string; type: string; description?: string }>
+): void {
+ obj.forEach((element) => {
+ if (element.name) {
+ const newKey = parentKey ? `${parentKey}${separator}${element.name}` : element.name;
+
+ if (element.fields && Array.isArray(element.fields)) {
+ flattenObject(element.fields, newKey, separator, result);
+ } else {
+ result.push({
+ name: newKey,
+ type: element.type,
+ description: element.description,
+ });
+ }
+ }
+ });
+}
+
+function sortArrayOfObjects(
+ objectsArray: Array<{ name: string; type: string; description?: string }>
+): Array<{ name: string; type: string; description?: string }> {
+ return objectsArray.sort((a, b) => {
+ return a.name.localeCompare(b.name);
+ });
+}
From 896dce358c05d6553ac184abae2164a907447c31 Mon Sep 17 00:00:00 2001
From: Shahzad
Date: Mon, 30 Sep 2024 18:09:52 +0200
Subject: [PATCH 019/107] [SLOs] Update API docs for group-by field !!
(#194393)
## Summary
Update API docs for group-by field !!
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
---
.../output/kibana.serverless.staging.yaml | 29 +++++++++-----
oas_docs/output/kibana.serverless.yaml | 29 +++++++++-----
oas_docs/output/kibana.staging.yaml | 29 +++++++++-----
oas_docs/output/kibana.yaml | 29 +++++++++-----
.../slo/docs/openapi/slo/bundled.json | 40 ++++++++++++++-----
.../slo/docs/openapi/slo/bundled.yaml | 27 ++++++++-----
.../schemas/create_slo_request.yaml | 4 +-
.../slo/components/schemas/group_by.yaml | 11 +++++
.../schemas/slo_definition_response.yaml | 6 +--
.../schemas/slo_with_summary_response.yaml | 6 +--
.../schemas/update_slo_request.yaml | 2 +
11 files changed, 147 insertions(+), 65 deletions(-)
create mode 100644 x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/group_by.yaml
diff --git a/oas_docs/output/kibana.serverless.staging.yaml b/oas_docs/output/kibana.serverless.staging.yaml
index cf5cdbac0e9a4..a5d53bd71cc83 100644
--- a/oas_docs/output/kibana.serverless.staging.yaml
+++ b/oas_docs/output/kibana.serverless.staging.yaml
@@ -32014,9 +32014,7 @@ components:
description: A description for the SLO.
type: string
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: >-
A optional and unique identifier for the SLO. Must be between 8 and
@@ -32175,6 +32173,21 @@ components:
type: number
title: Find SLO response
type: object
+ SLOs_group_by:
+ description: >-
+ optional group by field or fields to use to generate an SLO per distinct
+ value
+ example:
+ - - service.name
+ - service.name
+ - - service.name
+ - service.environment
+ oneOf:
+ - type: string
+ - items:
+ type: string
+ type: array
+ title: Group by
SLOs_indicator_properties_apm_availability:
description: Defines properties for the APM availability indicator type
type: object
@@ -32765,9 +32778,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -32851,9 +32862,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -33074,6 +33083,8 @@ components:
description:
description: A description for the SLO.
type: string
+ groupBy:
+ $ref: '#/components/schemas/SLOs_group_by'
indicator:
oneOf:
- $ref: '#/components/schemas/SLOs_indicator_properties_custom_kql'
diff --git a/oas_docs/output/kibana.serverless.yaml b/oas_docs/output/kibana.serverless.yaml
index 675e8c0903b2f..93c3a5533c8a0 100644
--- a/oas_docs/output/kibana.serverless.yaml
+++ b/oas_docs/output/kibana.serverless.yaml
@@ -15304,9 +15304,7 @@ components:
description: A description for the SLO.
type: string
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: >-
A optional and unique identifier for the SLO. Must be between 8 and
@@ -15465,6 +15463,21 @@ components:
type: number
title: Find SLO response
type: object
+ SLOs_group_by:
+ description: >-
+ optional group by field or fields to use to generate an SLO per distinct
+ value
+ example:
+ - - service.name
+ - service.name
+ - - service.name
+ - service.environment
+ oneOf:
+ - type: string
+ - items:
+ type: string
+ type: array
+ title: Group by
SLOs_indicator_properties_apm_availability:
description: Defines properties for the APM availability indicator type
type: object
@@ -16055,9 +16068,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -16141,9 +16152,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -16364,6 +16373,8 @@ components:
description:
description: A description for the SLO.
type: string
+ groupBy:
+ $ref: '#/components/schemas/SLOs_group_by'
indicator:
oneOf:
- $ref: '#/components/schemas/SLOs_indicator_properties_custom_kql'
diff --git a/oas_docs/output/kibana.staging.yaml b/oas_docs/output/kibana.staging.yaml
index c70f9b4bce454..96352fc0cd962 100644
--- a/oas_docs/output/kibana.staging.yaml
+++ b/oas_docs/output/kibana.staging.yaml
@@ -40005,9 +40005,7 @@ components:
description: A description for the SLO.
type: string
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: >-
A optional and unique identifier for the SLO. Must be between 8 and
@@ -40166,6 +40164,21 @@ components:
type: number
title: Find SLO response
type: object
+ SLOs_group_by:
+ description: >-
+ optional group by field or fields to use to generate an SLO per distinct
+ value
+ example:
+ - - service.name
+ - service.name
+ - - service.name
+ - service.environment
+ oneOf:
+ - type: string
+ - items:
+ type: string
+ type: array
+ title: Group by
SLOs_indicator_properties_apm_availability:
description: Defines properties for the APM availability indicator type
type: object
@@ -40756,9 +40769,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -40842,9 +40853,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -41065,6 +41074,8 @@ components:
description:
description: A description for the SLO.
type: string
+ groupBy:
+ $ref: '#/components/schemas/SLOs_group_by'
indicator:
oneOf:
- $ref: '#/components/schemas/SLOs_indicator_properties_custom_kql'
diff --git a/oas_docs/output/kibana.yaml b/oas_docs/output/kibana.yaml
index 947ae1155041f..6d53cb1a38bdd 100644
--- a/oas_docs/output/kibana.yaml
+++ b/oas_docs/output/kibana.yaml
@@ -22291,9 +22291,7 @@ components:
description: A description for the SLO.
type: string
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: >-
A optional and unique identifier for the SLO. Must be between 8 and
@@ -22452,6 +22450,21 @@ components:
type: number
title: Find SLO response
type: object
+ SLOs_group_by:
+ description: >-
+ optional group by field or fields to use to generate an SLO per distinct
+ value
+ example:
+ - - service.name
+ - service.name
+ - - service.name
+ - service.environment
+ oneOf:
+ - type: string
+ - items:
+ type: string
+ type: array
+ title: Group by
SLOs_indicator_properties_apm_availability:
description: Defines properties for the APM availability indicator type
type: object
@@ -23042,9 +23055,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -23128,9 +23139,7 @@ components:
example: true
type: boolean
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- example: some.field
- type: string
+ $ref: '#/components/schemas/SLOs_group_by'
id:
description: The identifier of the SLO.
example: 8853df00-ae2e-11ed-90af-09bb6422b258
@@ -23351,6 +23360,8 @@ components:
description:
description: A description for the SLO.
type: string
+ groupBy:
+ $ref: '#/components/schemas/SLOs_group_by'
indicator:
oneOf:
- $ref: '#/components/schemas/SLOs_indicator_properties_custom_kql'
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.json b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.json
index 7e28ae729cc46..b8d3e28ce210a 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.json
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.json
@@ -1738,6 +1738,31 @@
}
}
},
+ "group_by": {
+ "title": "Group by",
+ "description": "optional group by field or fields to use to generate an SLO per distinct value",
+ "example": [
+ [
+ "service.name"
+ ],
+ "service.name",
+ [
+ "service.name",
+ "service.environment"
+ ]
+ ],
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
"slo_with_summary_response": {
"title": "SLO response",
"type": "object",
@@ -1835,9 +1860,7 @@
"example": true
},
"groupBy": {
- "description": "optional group by field to use to generate an SLO per distinct value",
- "type": "string",
- "example": "some.field"
+ "$ref": "#/components/schemas/group_by"
},
"instanceId": {
"description": "the value derived from the groupBy field, if present, otherwise '*'",
@@ -2046,9 +2069,7 @@
"$ref": "#/components/schemas/settings"
},
"groupBy": {
- "description": "optional group by field to use to generate an SLO per distinct value",
- "type": "string",
- "example": "some.field"
+ "$ref": "#/components/schemas/group_by"
},
"tags": {
"description": "List of tags",
@@ -2142,6 +2163,9 @@
"settings": {
"$ref": "#/components/schemas/settings"
},
+ "groupBy": {
+ "$ref": "#/components/schemas/group_by"
+ },
"tags": {
"description": "List of tags",
"type": "array",
@@ -2243,9 +2267,7 @@
"example": true
},
"groupBy": {
- "description": "optional group by field to use to generate an SLO per distinct value",
- "type": "string",
- "example": "some.field"
+ "$ref": "#/components/schemas/group_by"
},
"tags": {
"description": "List of tags",
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.yaml
index 0426d2d03bc89..dc57f3e4ea4f6 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.yaml
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/bundled.yaml
@@ -1204,6 +1204,19 @@ components:
example: 0.9836
errorBudget:
$ref: '#/components/schemas/error_budget'
+ group_by:
+ title: Group by
+ description: optional group by field or fields to use to generate an SLO per distinct value
+ example:
+ - - service.name
+ - service.name
+ - - service.name
+ - service.environment
+ oneOf:
+ - type: string
+ - type: array
+ items:
+ type: string
slo_with_summary_response:
title: SLO response
type: object
@@ -1274,9 +1287,7 @@ components:
type: boolean
example: true
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: some.field
+ $ref: '#/components/schemas/group_by'
instanceId:
description: the value derived from the groupBy field, if present, otherwise '*'
type: string
@@ -1425,9 +1436,7 @@ components:
settings:
$ref: '#/components/schemas/settings'
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: some.field
+ $ref: '#/components/schemas/group_by'
tags:
description: List of tags
type: array
@@ -1487,6 +1496,8 @@ components:
$ref: '#/components/schemas/objective'
settings:
$ref: '#/components/schemas/settings'
+ groupBy:
+ $ref: '#/components/schemas/group_by'
tags:
description: List of tags
type: array
@@ -1558,9 +1569,7 @@ components:
type: boolean
example: true
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: some.field
+ $ref: '#/components/schemas/group_by'
tags:
description: List of tags
type: array
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/create_slo_request.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/create_slo_request.yaml
index c3a848fe52133..292836da1c535 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/create_slo_request.yaml
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/create_slo_request.yaml
@@ -37,9 +37,7 @@ properties:
settings:
$ref: "settings.yaml"
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: "some.field"
+ $ref: "group_by.yaml"
tags:
description: List of tags
type: array
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/group_by.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/group_by.yaml
new file mode 100644
index 0000000000000..6870d539c17ee
--- /dev/null
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/group_by.yaml
@@ -0,0 +1,11 @@
+title: Group by
+description: optional group by field or fields to use to generate an SLO per distinct value
+example:
+ - [ "service.name" ]
+ - service.name
+ - [ "service.name", "service.environment" ]
+oneOf:
+ - type: string
+ - type: array
+ items:
+ type: string
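For illustration only (this sketch is not part of the patch, and everything other than the two `groupBy` shapes defined by the schema above is hypothetical), a client payload may now pass `groupBy` either as a single field name or as a list of field names:

```typescript
// Hypothetical payload fragments; only the `groupBy` shapes come from the schema above.
const singleField: { groupBy: string | string[] } = {
  groupBy: 'service.name',
};

const multipleFields: { groupBy: string | string[] } = {
  groupBy: ['service.name', 'service.environment'],
};
```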
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_definition_response.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_definition_response.yaml
index 0b4ffa774d10f..430b105eb32fc 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_definition_response.yaml
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_definition_response.yaml
@@ -63,9 +63,7 @@ properties:
type: boolean
example: true
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: "some.field"
+ $ref: "group_by.yaml"
tags:
description: List of tags
type: array
@@ -82,4 +80,4 @@ properties:
version:
description: The internal SLO version
type: number
- example: 2
\ No newline at end of file
+ example: 2
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_with_summary_response.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_with_summary_response.yaml
index df8e35996feb3..3da2423acb154 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_with_summary_response.yaml
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/slo_with_summary_response.yaml
@@ -67,9 +67,7 @@ properties:
type: boolean
example: true
groupBy:
- description: optional group by field to use to generate an SLO per distinct value
- type: string
- example: "some.field"
+ $ref: "group_by.yaml"
instanceId:
description: the value derived from the groupBy field, if present, otherwise '*'
type: string
@@ -90,4 +88,4 @@ properties:
version:
description: The internal SLO version
type: number
- example: 2
\ No newline at end of file
+ example: 2
diff --git a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/update_slo_request.yaml b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/update_slo_request.yaml
index 8d2c61c7b2249..95603878e8e99 100644
--- a/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/update_slo_request.yaml
+++ b/x-pack/plugins/observability_solution/slo/docs/openapi/slo/components/schemas/update_slo_request.yaml
@@ -26,6 +26,8 @@ properties:
$ref: "objective.yaml"
settings:
$ref: "settings.yaml"
+ groupBy:
+ $ref: "group_by.yaml"
tags:
description: List of tags
type: array
From 5def848d2cee99e337a46f57893c14be1d1c7052 Mon Sep 17 00:00:00 2001
From: Vadim Kibana <82822460+vadimkibana@users.noreply.github.com>
Date: Mon, 30 Sep 2024 18:14:58 +0200
Subject: [PATCH 020/107] [ES|QL] AST package documentation (#194296)
Updates documentation for the ES|QL AST package.
---
packages/kbn-esql-ast/README.md | 97 ++-------
packages/kbn-esql-ast/src/builder/README.md | 39 ++++
packages/kbn-esql-ast/src/parser/README.md | 144 ++++++++++++-
.../kbn-esql-ast/src/pretty_print/README.md | 76 ++++++-
packages/kbn-esql-ast/src/visitor/README.md | 202 +++++++++++++++++-
packages/kbn-esql-ast/src/walker/README.md | 125 ++++++++---
6 files changed, 575 insertions(+), 108 deletions(-)
create mode 100644 packages/kbn-esql-ast/src/builder/README.md
diff --git a/packages/kbn-esql-ast/README.md b/packages/kbn-esql-ast/README.md
index 76232d371b9cb..f7be5248f2ca0 100644
--- a/packages/kbn-esql-ast/README.md
+++ b/packages/kbn-esql-ast/README.md
@@ -1,89 +1,38 @@
-# ES|QL utility library
+# ES|QL AST library
-## Folder structure
+The general idea of this package is to provide low-level ES|QL parsing,
+building, traversal, pretty-printing, and manipulation features on top of a
+custom compact AST representation, which is designed to be resilient to many
+grammar changes.
-This library brings all the foundation data structure to enable all advanced features within an editor for ES|QL as validation, autocomplete, hover, etc...
-The package is structure as follow:
+Contents of this package:
-```
-src
- |- antlr // => contains the ES|QL grammar files and various compilation assets
- | ast_factory.ts // => binding to the Antlr that generates the AST data structure
- | ast_errors.ts // => error translation utility from raw Antlr to something understandable (somewhat)
- | antlr_error_listener.ts // => The ES|QL syntax error listener
- | antlr_facade.ts // => getParser and getLexer utilities
- | ... // => miscellaneas utilities to work with AST
-```
-
-### Basic usage
-
-#### Get AST from a query string
+- [`builder` — Contains the `Builder` class for AST node construction](./src/builder/README.md).
+- [`parser` — Contains text to ES|QL AST parsing code](./src/parser/README.md).
+- [`walker` — Contains the ES|QL AST `Walker` utility](./src/walker/README.md).
+- [`visitor` — Contains the ES|QL AST `Visitor` utility](./src/visitor/README.md).
+- [`pretty_print` — Contains code for formatting AST to text](./src/pretty_print/README.md).
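+
+As a quick, illustrative example (see the `parser` and `pretty_print` docs
+linked above for details), a query can be parsed into an AST and printed back
+to text:
+
+```typescript
+import { parse, BasicPrettyPrinter } from '@kbn/esql-ast';
+
+const { root, errors } = parse('FROM index | LIMIT 10');
+
+if (!errors.length) {
+  console.log(BasicPrettyPrinter.print(root)); // FROM index | LIMIT 10
+}
+```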
-This module contains the entire logic to translate from a query string into the AST data structure.
-The `getAstAndSyntaxErrors` function returns the AST data structure, unless a syntax error happens in which case the `errors` array gets populated with a Syntax error.
-##### Usage
+## Demo
-```js
-import { getAstAndSyntaxErrors } from '@kbn/esql-ast';
+Much of the functionality of this package is demonstrated in the demo UI. You
+can run it in Storybook, using the following command:
-const queryString = "from index | stats 1 + avg(myColumn) ";
-const { ast, errors} = await astProvider(queryString);
-
-if(errors){
- console.log({ syntaxErrors: errors });
-}
-// do stuff with the ast
+```bash
+yarn storybook esql_ast_inspector
```
-## How does it work
-
-The general idea of this package is to provide all ES|QL features on top of a custom compact AST definition (all data structure types defined in `./types.ts`) which is designed to be resilient to many grammar changes.
-The pipeline is the following:
+Alternatively, you can start Kibana with *Example Plugins* enabled, using:
+```bash
+yarn start --run-examples
```
-Antlr grammar files
-=> Compiled grammar files (.ts assets in the antlr folder)
-=> AST Factory (Antlr Parser tree => custom AST)
-```
-
-Each feature function works with the combination of the AST and the definition files: the former describe the current statement in a easy to traverse way, while the definitions describe what's the expected behaviour of each node in the AST node (i.e. what arguments should it accept? How many arguments? etc...).
-While AST requires the grammar to be compiled to be updated, definitions are static files which can be dynamically updated without running the ANTLR compile task.
-
-#### AST
-
-The AST is generated by 2 files: `ast_factory.ts` and its buddy `ast_walker.ts`:
-* `ast_factory.ts` is a binding to Antlr and access the Parser tree
-* Parser tree is passed over to `ast_walker` to append new AST nodes
-
-In general Antlr is resilient to grammar errors, in the sense that it can produe a Parser tree up to the point of the error, then stops. This is useful to perform partial tasks even with broken queries and this means that a partial AST can be produced even with an invalid query.
-
-### Keeping ES|QL up to date
-
-In general when operating on changes here use the `yarn kbn watch` in a terminal window to make sure changes are correctly compiled.
-
-### How to add new commands/options
-When a new command/option is added to ES|QL it is done via a grammar update.
-Therefore adding them requires a two step phase:
-* Update the grammar with the new one
- * add/fix all AST generator bindings in case of new/changed TOKENS in the `lexer` grammar file
-* Update the definition files for commands/options
+Then navigate to the *ES|QL AST Inspector* plugin in the Kibana UI.
-To update the grammar:
-1. Make sure the `lexer` and `parser` files are up to date with their ES counterparts
- * an existing Kibana CI job is updating them already automatically
-2. Run the script into the `package.json` to compile the ES|QL grammar.
-3. open the `ast_factory.ts` file and add a new `exit` method
-4. write some code in the `ast_walker/ts` to translate the Antlr Parser tree into the custom AST (there are already few utilites for that, but sometimes it is required to write some more code if the `parser` introduced a new flow)
- * pro tip: use the `http://lab.antlr.org/` to visualize/debug the parser tree for a given statement (copy and paste the grammar files there)
-5. if something goes wrong with new quoted/unquoted identifier token, open the `ast_helpers.ts` and check the ids of the new tokens in the `getQuotedText` and `getUnquotedText` functions - please make sure to leave a comment on the token name
-#### Debug and fix grammar changes (tokens, etc...)
+## Keeping ES|QL AST library up to date
-On TOKEN renaming or with subtle `lexer` grammar changes it can happens that test breaks, this can be happen for two main issues:
-* A TOKEN name changed so the `ast_walker.ts` doesn't find it any more. Go there and rename the TOKEN name.
-* TOKEN order changed and tests started failing. This probably generated some TOKEN id reorder and there are two functions in `ast_helpers.ts` who rely on hardcoded ids: `getQuotedText` and `getUnquotedText`.
- * Note that the `getQuotedText` and `getUnquotedText` are automatically updated on grammar changes detected by the Kibana CI sync job.
- * to fix this just look at the commented tokens and update the ids. If a new token add it and leave a comment to point to the new token name.
- * This choice was made to reduce the bundle size, as importing the `esql_parser` adds some hundreds of Kbs to the bundle otherwise.
\ No newline at end of file
+In general, when working on changes here, use `yarn kbn watch` in a terminal
+window to make sure the changes are correctly compiled.
diff --git a/packages/kbn-esql-ast/src/builder/README.md b/packages/kbn-esql-ast/src/builder/README.md
new file mode 100644
index 0000000000000..8b874579dab29
--- /dev/null
+++ b/packages/kbn-esql-ast/src/builder/README.md
@@ -0,0 +1,39 @@
+# Builder
+
+Contains the `Builder` class for AST node construction. It provides the most
+low-level stateless AST node construction API.
+
+The `Builder` API can be used when constructing AST nodes from scratch manually,
+and it is also used by the parser to construct the AST nodes during the parsing
+process.
+
+When parsing, the AST nodes will typically have more information, such as the
+position in the source code and other metadata. When constructing AST nodes
+manually, this information is not available, but the `Builder` API can still be
+used, as it allows the metadata to be omitted.
+
+
+## Usage
+
+Construct a `literal` expression node:
+
+```typescript
+import { Builder } from '@kbn/esql-ast';
+
+const node = Builder.expression.literal.numeric({ value: 42, literalType: 'integer' });
+```
+
+Returns:
+
+```js
+{
+ type: 'literal',
+ literalType: 'integer',
+ value: 42,
+ name: '42',
+
+ location: { min: 0, max: 0 },
+ text: '',
+ incomplete: false,
+}
+```
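+
+A constructed node can then be fed to the other utilities in this package. For
+example (an illustrative sketch based on the `pretty_print` docs), it can be
+printed back to text with `BasicPrettyPrinter.expression()`:
+
+```typescript
+import { Builder, BasicPrettyPrinter } from '@kbn/esql-ast';
+
+const node = Builder.expression.literal.numeric({ value: 42, literalType: 'integer' });
+
+console.log(BasicPrettyPrinter.expression(node)); // 42
+```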
diff --git a/packages/kbn-esql-ast/src/parser/README.md b/packages/kbn-esql-ast/src/parser/README.md
index 1500be94c40c8..e054c8999714c 100644
--- a/packages/kbn-esql-ast/src/parser/README.md
+++ b/packages/kbn-esql-ast/src/parser/README.md
@@ -1,6 +1,91 @@
+# ES|QL Parser
+
+The Kibana ES|QL parser uses the ANTLR library for lexing and parse tree (CST)
+generation. The ANTLR grammar is imported from the Elasticsearch repository in
+an automated CI job.
+
+We use two ANTLR outputs, the token stream and the parse tree, to (1) generate
+the Abstract Syntax Tree (AST), (2) perform syntax validation, (3) drive syntax
+highlighting, and (4) extract formatting (comments and whitespace) and assign
+it to AST nodes.
+
+In general, ANTLR is resilient to grammar errors, in the sense that it can
+produce a parse tree up to the point of the error and then stop. This is useful
+for performing partial tasks even with broken queries, and it means that a
+partial AST can be produced even for an invalid query.
+
+
+## Folder structure
+
+The parser is structured as follows:
+
+```
+src/
+|- parser/ Contains the logic to parse the ES|QL query and generate the AST.
+| |- factories.ts Contains AST node factories.
+| |- antlr_error_listener.ts Contains code which traverses ANTLR CST and collects syntax errors.
+| |- esql_ast_builder_listener.ts Contains code which traverses ANTLR CST and builds the AST.
+|
+|- antlr/ Contains the autogenerated ES|QL ANTLR grammar files and various compilation assets.
+ |- esql_lexer.g4 Contains the ES|QL ANTLR lexer grammar.
+ |- esql_parser.g4 Contains the ES|QL ANTLR parser grammar.
+```
+
+
+## Usage
+
+### Get AST from a query string
+
+The `parse` function returns the AST data structure; if a syntax error happens,
+the `errors` array is populated with the syntax errors.
+
+```js
+import { parse } from '@kbn/esql-ast';
+
+const src = "FROM index | STATS 1 + AVG(myColumn) ";
+const { root, errors } = parse(src);
+
+if (errors.length) {
+  console.log({ syntaxErrors: errors });
+}
+
+// do stuff with the ast
+```
+
+The `root` is the root node of the AST. The AST is a tree structure where each
+node represents a part of the query. Each node has a `type` property which
+indicates the type of the node.
+
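+For example (a minimal sketch; the `commands` and `name` properties used below
+are assumptions about the exact AST shape, which is defined in this package's
+`types.ts`), the top-level command nodes can be inspected like this:
+
+```typescript
+import { parse } from '@kbn/esql-ast';
+
+const { root } = parse('FROM index | LIMIT 10');
+
+// Assumes the root query node exposes its child command nodes via `commands`.
+for (const command of root.commands) {
+  console.log(command.type, command.name); // e.g. 'command', 'from'
+}
+```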
+
+### Parse a query and populate the AST with comments
+
+When calling the `parse` method with the `withFormatting` flag set to `true`,
+the AST will be populated with comments.
+
+```js
+import { parse } from '@kbn/esql-ast';
+
+const src = "FROM /* COMMENT */ index";
+const { root } = parse(src, { withFormatting: true });
+```
+
+
## Comments
-### Inter-node comment places
+By default, the parsed AST does not include any *formatting* information,
+such as comments or whitespace. This is because the AST is designed to be
+compact and to be used for syntax validation, syntax highlighting, and other
+high-level operations.
+
+However, sometimes it is useful to have comments attached to the AST nodes. The
+parser can collect all comments when the `withFormatting` flag is set to `true`
+and attach them to the AST nodes. The comments are attached to the closest node,
+while also considering the surrounding punctuation.
+
+### Inter-node comments
+
+Currently, when parsed, inter-node comments are attached to the node from the
+left side.
Around colon in source identifier:
@@ -25,3 +110,60 @@ Time interface expressions:
```esql
STATS 1 /* asdf */ DAY
```
+
+
+## Internal Details
+
+
+### How does it work?
+
+The pipeline is the following:
+
+1. ANTLR grammar files are added to Kibana.
+2. ANTLR grammar files are compiled to `.ts` assets in the `antlr` folder.
+3. A query is parsed to a CST by ANTLR.
+4. The `ESQLAstBuilderListener` traverses the CST and builds the AST.
+5. Optionally:
+ 1. Comments and whitespace are extracted from the ANTLR lexer's token stream.
+ 2. The comments and whitespace are attached to the AST nodes.
+
+
+### How to add new commands/options?
+
+When a new command/option is added to ES|QL, it is done via a grammar update.
+Therefore, adding it requires a two-step process:
+
+To update the grammar:
+
+1. Make sure the `lexer` and `parser` files are up to date with their ES
+ counterparts.
+ * an existing Kibana CI job is updating them already automatically
+2. Run the script from the `package.json` to compile the ES|QL grammar.
+3. Open the `ast_factory.ts` file and add a new `exit` method.
+4. Write some code in `ast_walker.ts` to translate the ANTLR parse tree
+ into the custom AST (there are already a few utilities for that, but sometimes
+ more code is required if the `parser` introduced a new flow).
+ * Pro tip: use `http://lab.antlr.org/` to visualize/debug the parse tree
+ for a given statement (copy and paste the grammar files there).
+5. If something goes wrong with a new quoted/unquoted identifier token, open
+ `ast_helpers.ts` and check the ids of the new tokens in the `getQuotedText`
+ and `getUnquotedText` functions; please make sure to leave a comment on the
+ token name.
+
+
+#### Debug and fix grammar changes (tokens, etc...)
+
+On token renaming or with subtle `lexer` grammar changes it can happen that
+tests break. This usually happens for two main reasons:
+
+* A token name changed, so `esql_ast_builder_listener.ts` doesn't find it any
+ more. Go there and rename the token name.
+* Token order changed and tests started failing. This probably means the token
+ ids were reordered, and there are two functions in `helpers.ts` that rely on
+ hardcoded ids: `getQuotedText` and `getUnquotedText`.
+ * Note that the `getQuotedText` and `getUnquotedText` functions are
+ automatically updated on grammar changes detected by the Kibana CI sync job.
+ * To fix this, just look at the commented tokens and update the ids. If a
+ token is new, add it and leave a comment pointing to the new token name.
+ * This choice was made to reduce the bundle size, as importing the
+ `esql_parser` would otherwise add some hundreds of KB to the bundle.
diff --git a/packages/kbn-esql-ast/src/pretty_print/README.md b/packages/kbn-esql-ast/src/pretty_print/README.md
index 48066697a5a7e..1d600fc19d3bc 100644
--- a/packages/kbn-esql-ast/src/pretty_print/README.md
+++ b/packages/kbn-esql-ast/src/pretty_print/README.md
@@ -4,20 +4,82 @@
human-readable string. This is useful for debugging or for displaying
the AST to the user.
-This module provides a number of pretty-printing options.
+This module provides a number of pretty-printing facilities. There are two
+main classes that provide pretty-printing:
+
+- `BasicPrettyPrinter` — provides the basic pretty-printing to a single
+ line.
+- `WrappingPrettyPrinter` — provides more advanced pretty-printing, which
+ can wrap the query to multiple lines, and can also wrap the query to a
+ specific width.
## `BasicPrettyPrinter`
-The `BasicPrettyPrinter` class provides the most basic pretty-printing—it
-prints a query to a single line. Or it can print a query with each command on
-a separate line, with the ability to customize the indentation before the pipe
-character.
+The `BasicPrettyPrinter` class provides the simpler pretty-printing
+functionality—it prints a query to a single line. Or, it can print a query
+with each command on a separate line, with the ability to customize the
+indentation before the pipe character.
+
+Usage:
+
+```typescript
+import { parse, BasicPrettyPrinter } from '@kbn/esql-ast';
+
+const src = 'FROM index | LIMIT 10';
+const { root } = parse(src);
+const text = BasicPrettyPrinter.print(root);
+
+console.log(text); // FROM index | LIMIT 10
+```
+
+It can print each command on a separate line, with a custom indentation before
+the pipe character:
+
+```typescript
+const text = BasicPrettyPrinter.multiline(root, { pipeTab: ' ' });
+```
It can also print a single command to a single line; or an expression to a
-single line.
+single line. Below is the summary of the top-level functions:
- `BasicPrettyPrinter.print()` — prints query to a single line.
- `BasicPrettyPrinter.multiline()` — prints a query to multiple lines.
- `BasicPrettyPrinter.command()` — prints a command to a single line.
-- `BasicPrettyPrinter.expression()` — prints an expression to a single line.
+- `BasicPrettyPrinter.expression()` — prints an expression to a single
+ line.
+
+See `BasicPrettyPrinterOptions` for formatting options. For example, the
+`lowercase` option allows you to lowercase all ES|QL keywords:
+
+```typescript
+const text = BasicPrettyPrinter.print(root, { lowercase: true });
+```
+
+The `BasicPrettyPrinter` prints only *left* and *right* multi-line comments,
+which do not have line breaks, as this formatter is designed to print a query
+to a single line. If you need to print a query to multiple lines, use the
+`WrappingPrettyPrinter`.
+
+
+## `WrappingPrettyPrinter`
+
+The *wrapping pretty printer* can print a query to multiple lines, and can wrap
+the text to a new line if the line width exceeds a certain threshold. It also
+prints all comments attached to the AST (including ones that force the text
+to be wrapped).
+
+Usage:
+
+```typescript
+import { parse, WrappingPrettyPrinter } from '@kbn/esql-ast';
+
+const src = `
+ FROM index /* this is a comment */
+ | LIMIT 10`;
+const { root } = parse(src, { withFormatting: true });
+const text = WrappingPrettyPrinter.print(root);
+```
+
+See `WrappingPrettyPrinterOptions` interface for available formatting options.
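+
+For example (a hypothetical sketch; the `wrap` option name is an assumption
+here, check the `WrappingPrettyPrinterOptions` interface for the actual option
+names), the printed line width could be limited like this:
+
+```typescript
+import { parse, WrappingPrettyPrinter } from '@kbn/esql-ast';
+
+const { root } = parse('FROM index | WHERE a > 10 AND b < 20 | LIMIT 10', {
+  withFormatting: true,
+});
+
+// `wrap` is assumed to be the maximum line width option (hypothetical name).
+const text = WrappingPrettyPrinter.print(root, { wrap: 40 });
+```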
+
diff --git a/packages/kbn-esql-ast/src/visitor/README.md b/packages/kbn-esql-ast/src/visitor/README.md
index c952c8a34d8d9..20d55c0967e10 100644
--- a/packages/kbn-esql-ast/src/visitor/README.md
+++ b/packages/kbn-esql-ast/src/visitor/README.md
@@ -1,4 +1,28 @@
-## High-level AST structure
+# `Visitor` Traversal API
+
+The `Visitor` traversal API provides a feature-rich way to traverse the ES|QL
+AST. It is more powerful than the [`Walker` API](../walker/README.md), as it
+allows you to traverse the AST in a more flexible way.
+
+The `Visitor` API allows you to traverse the AST starting from the root node,
+a command statement, or an expression. Unlike the `Walker` API, the `Visitor`
+does not automatically traverse the entire AST. Instead, the developer has to
+manually call the necessary *visit* methods to traverse the AST. This allows
+you to traverse the AST in a more flexible way: only traverse the parts of the
+AST that are needed, or traverse the AST in a different order, or multiple
+times.
+
+The `Visitor` API is also more powerful than the `Walker` API, as for each
+visitor callback it provides a *context* object, which contains the information
+about the current node as well as the parent node, and the whole parent chain
+up to the root node.
+
+In addition, each visitor callback can return a value (*output*), which is then
+passed to the parent node, in the place where the visitor was called. Also, when
+a child is visited, the parent node can pass in *input* to the child visitor.
+
+
+## About ES|QL AST structure
Broadly, there are two AST node types: (1) commands (say `FROM ...`, like
*statements* in other languages), and (2) expressions (say `a + b`, or `fn()`).
@@ -59,7 +83,8 @@ As of this writing, the following expressions are defined:
- Column identifier expression, `{type: "column"}`, like `@timestamp`
- Function call expression, `{type: "function"}`, like `fn(123)`
- Literal expression, `{type: "literal"}`, like `123`, `"hello"`
-- List literal expression, `{type: "list"}`, like `[1, 2, 3]`, `["a", "b", "c"]`, `[true, false]`
+- List literal expression, `{type: "list"}`, like `[1, 2, 3]`,
+ `["a", "b", "c"]`, `[true, false]`
- Time interval expression, `{type: "interval"}`, like `1h`, `1d`, `1w`
- Inline cast expression, `{type: "cast"}`, like `abc::int`, `def::string`
- Unknown node, `{type: "unknown"}`
@@ -67,3 +92,176 @@ As of this writing, the following expressions are defined:
Each expression has a `visitExpressionX` callback, where `X` is the type of the
expression. If a expression-specific callback is not found, the generic
`visitExpression` callback is called.
+
+
+## `Visitor` API Usage
+
+The `Visitor` API is used to traverse the AST. The process is as follows:
+
+1. Create a new `Visitor` instance.
+2. Register callbacks for the nodes you are interested in.
+3. Call the `visitQuery`, `visitCommand`, or `visitExpression` method to start
+ the traversal.
+
+For example, the below code snippet prints the type of each expression node:
+
+```typescript
+new Visitor()
+ .on('visitExpression', (ctx) => console.log(ctx.node.type))
+ .on('visitCommand', (ctx) => [...ctx.visitArguments()])
+ .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+ .visitQuery(root);
+```
+
+In the `visitQuery` callback it visits all commands, using the `visitCommands`.
+In the `visitCommand` callback it visits all arguments, using the
+`visitArguments`. And finally, in the `visitExpression` callback it prints the
+type of the expression node.
+
+Above we started the traversal from the root node, using the `.visitQuery(root)`
+method. However, one can start the traversal from any node, by calling the
+following methods:
+
+- `.visitQuery()` — Start traversal from the root node.
+- `.visitCommand()` — Start traversal from a command node.
+- `.visitExpression()` — Start traversal from an expression node.
+
+
+### Specifying Callbacks
+
+The simplest way to traverse the AST is to specify the below three callbacks:
+
+- `visitQuery` — Called for every query node. (Normally once.)
+- `visitCommand` — Called for every command node.
+- `visitExpression` — Called for every expression node.
+
+
+However, you can be more specific and specify callbacks for commands and
+expression types. This way the context `ctx` provided to the callback will have
+helpful methods specific to the node type.
+
+When a more specific callback is registered for a node, the generic
+`visitCommand` or `visitExpression` callback is not called for that node.
+
+You can specify a specific callback for each command, instead of the generic
+`visitCommand`:
+
+- `visitFromCommand` — Called for every `FROM` command node.
+- `visitLimitCommand` — Called for every `LIMIT` command node.
+- `visitExplainCommand` — Called for every `EXPLAIN` command node.
+- `visitRowCommand` — Called for every `ROW` command node.
+- `visitMetricsCommand` — Called for every `METRICS` command node.
+- `visitShowCommand` — Called for every `SHOW` command node.
+- `visitMetaCommand` — Called for every `META` command node.
+- `visitEvalCommand` — Called for every `EVAL` command node.
+- `visitStatsCommand` — Called for every `STATS` command node.
+- `visitInlineStatsCommand` — Called for every `INLINESTATS` command node.
+- `visitLookupCommand` — Called for every `LOOKUP` command node.
+- `visitKeepCommand` — Called for every `KEEP` command node.
+- `visitSortCommand` — Called for every `SORT` command node.
+- `visitWhereCommand` — Called for every `WHERE` command node.
+- `visitDropCommand` — Called for every `DROP` command node.
+- `visitRenameCommand` — Called for every `RENAME` command node.
+- `visitDissectCommand` — Called for every `DISSECT` command node.
+- `visitGrokCommand` — Called for every `GROK` command node.
+- `visitEnrichCommand` — Called for every `ENRICH` command node.
+- `visitMvExpandCommand` — Called for every `MV_EXPAND` command node.
+
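+For example, here is a minimal sketch that counts `WHERE` commands in a query.
+It mirrors the traversal pattern used by the other examples in this document
+(the generic `visitCommand` and `visitQuery` callbacks are still registered so
+the traversal reaches every command):
+
+```typescript
+let whereCommandCount = 0;
+
+new Visitor()
+  .on('visitWhereCommand', () => {
+    // Called only for WHERE command nodes.
+    whereCommandCount++;
+  })
+  .on('visitCommand', () => null) // Fallback for all other commands.
+  .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+  .visitQuery(root);
+```
+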
+Similarly, you can specify a specific callback for each expression type, instead
+of the generic `visitExpression`:
+
+- `visitColumnExpression` — Called for every column expression node, say
+ `@timestamp`.
+- `visitSourceExpression` — Called for every source expression node, say
+ `tsdb_index`.
+- `visitFunctionCallExpression` — Called for every function call
+ expression node, including binary expressions such as `a + b`.
+- `visitLiteralExpression` — Called for every literal expression node, say
+ `123`, `"hello"`.
+- `visitListLiteralExpression` — Called for every list literal expression
+ node, say `[1, 2, 3]`, `["a", "b", "c"]`.
+- `visitTimeIntervalLiteralExpression` — Called for every time interval
+ literal expression node, say `1h`, `1d`, `1w`.
+- `visitInlineCastExpression` — Called for every inline cast expression
+ node, say `abc::int`, `def::string`.
+- `visitRenameExpression` — Called for every rename expression node, say
+ `a AS b`.
+- `visitOrderExpression` — Called for every order expression node, say
+ `@timestamp ASC`.
+
+
+### Using the Node Context
+
+Each visitor callback receives a `ctx` object, which contains a reference to
+the parent node's context:
+
+```typescript
+new Visitor()
+  .on('visitExpression', (ctx) => {
+    // Context of the parent node that visited this expression, if any.
+    ctx.parent;
+  });
+```
+
+Each visitor context also provides methods to visit child nodes, if needed. For
+example, to visit all arguments of a command node:
+
+```typescript
+const expressions = [];
+
+new Visitor()
+  .on('visitExpression', (ctx) => expressions.push(ctx.node))
+  .on('visitCommand', (ctx) => {
+    for (const output of ctx.visitArguments()) {
+      // Consume the output of each visited argument here, if needed.
+    }
+  })
+  .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+  .visitQuery(root);
+```
+
+The node context object may also have node-specific methods. For example, the
+`LIMIT` command context has the `.numeric()` method, which returns the numeric
+value of the `LIMIT` command:
+
+```typescript
+new Visitor()
+ .on('visitLimitCommand', (ctx) => {
+ console.log(ctx.numeric());
+ })
+ .on('visitCommand', () => null)
+ .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+ .visitQuery(root);
+```
+
+
+### Using the Visitor Output
+
+Each visitor callback can return an *output*, which is then passed to the parent
+callback. This allows passing information from a child node up to its parent
+node.
+
+For example, the below code snippet collects all column names in the AST:
+
+```typescript
+const columns = new Visitor()
+ .on('visitExpression', (ctx) => null)
+ .on('visitColumnExpression', (ctx) => ctx.node.name)
+ .on('visitCommand', (ctx) => [...ctx.visitArguments()])
+ .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+ .visitQuery(root);
+```
+
+
+### Using the Visitor Input
+
+Analogous to the output, each visitor callback can receive an *input* value.
+This allows passing information from the parent node down to the child node.
+
+For example, the below code snippet prints all column names prefixed with the
+text `"prefix"`:
+
+```typescript
+new Visitor()
+ .on('visitExpression', (ctx) => null)
+ .on('visitColumnExpression', (ctx, INPUT) => console.log(INPUT + ctx.node.name))
+ .on('visitCommand', (ctx) => [...ctx.visitArguments("prefix")])
+ .on('visitQuery', (ctx) => [...ctx.visitCommands()])
+ .visitQuery(root);
+```
diff --git a/packages/kbn-esql-ast/src/walker/README.md b/packages/kbn-esql-ast/src/walker/README.md
index 74e834e9095bc..4614350279b0c 100644
--- a/packages/kbn-esql-ast/src/walker/README.md
+++ b/packages/kbn-esql-ast/src/walker/README.md
@@ -1,41 +1,118 @@
-# ES|QL AST Walker
+# `Walker` Traversal API
-The ES|QL AST Walker is a utility that traverses the ES|QL AST and provides a
-set of callbacks that can be used to perform introspection of the AST.
+The ES|QL AST `Walker` is a utility that traverses the ES|QL AST. The developer
+can provide a set of callbacks which are called when the walker visits a
+specific type of node.
+
+The `Walker` utility can traverse the AST starting from any node, not just
+the root node.
+
+
+## Low-level API
To start a new *walk* you create a `Walker` instance and call the `walk()` method
with the AST node to start the walk from.
```ts
-
-import { Walker, getAstAndSyntaxErrors } from '@kbn/esql-ast';
+import { Walker } from '@kbn/esql-ast';
const walker = new Walker({
- // Called every time a function node is visited.
- visitFunction: (fn) => {
+ /**
+ * Visit commands
+ */
+ visitCommand: (node: ESQLCommand) => {
+ // Called for every command node.
+ },
+ visitCommandOption: (node: ESQLCommandOption) => {
+ // Called for every command option node.
+ },
+
+ /**
+ * Visit expressions
+ */
+ visitFunction: (fn: ESQLFunction) => {
+ // Called every time a function expression is visited.
console.log('Function:', fn.name);
},
- // Called every time a source identifier node is visited.
- visitSource: (source) => {
+ visitSource: (source: ESQLSource) => {
+ // Called every time a source identifier expression is visited.
console.log('Source:', source.name);
},
+ visitQuery: (node: ESQLAstQueryExpression) => {
+ // Called for every query node.
+ },
+ visitColumn: (node: ESQLColumn) => {
+ // Called for every column node.
+ },
+ visitLiteral: (node: ESQLLiteral) => {
+ // Called for every literal node.
+ },
+ visitListLiteral: (node: ESQLList) => {
+ // Called for every list literal node.
+ },
+ visitTimeIntervalLiteral: (node: ESQLTimeInterval) => {
+ // Called for every time interval literal node.
+ },
+ visitInlineCast: (node: ESQLInlineCast) => {
+ // Called for every inline cast node.
+ },
});
-const { ast } = getAstAndSyntaxErrors('FROM source | STATS fn()');
walker.walk(ast);
```
-Conceptual structure of an ES|QL AST:
-
-- A single ES|QL query is composed of one or more source commands and zero or
- more transformation commands.
-- Each command is represented by a `command` node.
-- Each command contains a list expressions named in ES|QL AST as *AST Item*.
- - `function` — function call expression.
- - `option` — a list of expressions with a specific role in the command.
- - `source` — s source identifier expression.
- - `column` — a field identifier expression.
- - `timeInterval` — a time interval expression.
- - `list` — a list literal expression.
- - `literal` — a literal expression.
- - `inlineCast` — an inline cast expression.
+It is also possible to provide a single `visitAny` callback that is called for
+any node type that does not have a specific visitor.
+
+```ts
+import { Walker } from '@kbn/esql-ast';
+
+const walker = new Walker({
+ visitAny: (node: ESQLProperNode) => {
+ // Called for any node type that does not have a specific visitor.
+ },
+});
+
+walker.walk(ast);
+```
+
+
+## High-level API
+
+There are a few high-level utility functions implemented on top of the
+low-level API for your convenience:
+
+- `Walker.walk` — Walks the AST and calls the appropriate visitor functions.
+- `Walker.commands` — Walks the AST and extracts all command statements.
+- `Walker.params` — Walks the AST and extracts all parameter literals.
+- `Walker.find` — Finds and returns the first node that matches the search criteria.
+- `Walker.findAll` — Finds and returns all nodes that match the search criteria.
+- `Walker.match` — Matches a single node against a template object.
+- `Walker.matchAll` — Matches all nodes against a template object.
+- `Walker.findFunction` — Finds the first function that matches the predicate.
+- `Walker.hasFunction` — Searches for at least one occurrence of a function or expression in the AST.
+- `Walker.visitComments` — Visits all comments in the AST.
+
+The `Walker.walk()` method is simply syntactic sugar around the low-level
+`new Walker().walk()` usage.
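+
+For instance, the two forms below should be equivalent (a sketch; `ast` stands
+for any ES|QL AST node obtained from the parser, as in the earlier examples):
+
+```ts
+// Using the static helper:
+Walker.walk(ast, {
+  visitFunction: (fn) => console.log('Function:', fn.name),
+});
+
+// Equivalent low-level form:
+new Walker({
+  visitFunction: (fn) => console.log('Function:', fn.name),
+}).walk(ast);
+```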
+
+The `Walker.commands()` method returns a list of all commands. This also
+includes nested commands, once they become supported in ES|QL.
+
+The `Walker.params()` method collects all param literals, such as unnamed `?` or
+named `?param`, or ordered `?1`.
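+
+A short sketch of how these collectors might be used, assuming both accept any
+AST node and return plain arrays as described above:
+
+```ts
+const commands = Walker.commands(ast); // e.g. the FROM, WHERE, LIMIT, ... nodes
+const params = Walker.params(ast); // e.g. nodes for `?`, `?param`, `?1`
+
+console.log(commands.map((command) => command.name));
+console.log(params.length);
+```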
+
+The `Walker.find()` and `Walker.findAll()` methods are used to search for nodes
+in the AST that match specific criteria. The criteria are specified using a
+predicate function.
+
+The `Walker.match()` and `Walker.matchAll()` methods are also used to search for
+nodes in the AST, but unlike `find` and `findAll`, they use a template object
+to match the nodes.
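+
+For example, the following sketch (assuming signatures of a node plus a
+predicate for `find`/`findAll`, and a node plus a template object for
+`match`/`matchAll`) locates the first `LIMIT` command and all `avg(...)`
+function calls:
+
+```ts
+// First node for which the predicate returns true, if any.
+const limitCommand = Walker.find(
+  ast,
+  (node) => node.type === 'command' && node.name === 'limit'
+);
+
+// All nodes that match the template object.
+const avgCalls = Walker.matchAll(ast, { type: 'function', name: 'avg' });
+```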
+
+The `Walker.findFunction()` method is a simple utility to find the first
+function that matches a predicate. The `Walker.hasFunction()` method returns
+`true` if at least one function or expression in the AST matches the predicate.
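+
+A sketch, assuming `Walker.hasFunction()` accepts a node and a function name:
+
+```ts
+if (Walker.hasFunction(ast, 'avg')) {
+  console.log('The query calls avg() somewhere.');
+}
+```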
+
+The `Walker.visitComments()` method is used to visit all comments in the AST.
+You specify a callback that is called for each comment node.
From 7aa64b6ed59488ab10a5136199b69de0c86668af Mon Sep 17 00:00:00 2001
From: Julia Rechkunova
Date: Mon, 30 Sep 2024 18:20:21 +0200
Subject: [PATCH 021/107] [OneDiscover] Add EBT event to track field usage
(#193996)
- Closes https://github.com/elastic/kibana/issues/186156
- Closes https://github.com/elastic/kibana/issues/189454
## Summary
This PR adds a new EBT event type `discover_field_usage`, which is used for
tracking the addition and removal of grid columns and the addition of filters via
the +/-/exists buttons. Properties of the added events consist of:
- `eventName`: `dataTableSelection`, `dataTableRemoval`, or `filterAddition`
- `fieldName`: name of the field if it is part of the ECS schema
- `filterOperation`: `+`, `-`, or `_exists_`
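For example, adding an ECS field such as `host.name` as a grid column would
report an event along the lines of (illustrative):
```ts
const exampleEvent = { eventName: 'dataTableSelection', fieldName: 'host.name' };
```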
## Testing
Enable "Usage collection" global setting.
Navigate to Discover and observe `kibana-browser` requests in Network
tab.
### Checklist
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
---
.../discover/public/__mocks__/services.ts | 2 +
.../application/context/context_app.test.tsx | 1 +
.../application/context/context_app.tsx | 31 +-
.../components/layout/discover_documents.tsx | 32 +-
.../components/layout/discover_layout.tsx | 40 ++-
src/plugins/discover/public/build_services.ts | 10 +-
.../context_awareness/__mocks__/index.tsx | 8 +-
.../profiles_manager.test.ts | 4 +-
.../context_awareness/profiles_manager.ts | 10 +-
src/plugins/discover/public/plugin.tsx | 86 +++--
.../discover_ebt_context_manager.test.ts | 95 -----
.../services/discover_ebt_context_manager.ts | 75 ----
.../services/discover_ebt_manager.test.ts | 242 +++++++++++++
.../public/services/discover_ebt_manager.ts | 219 ++++++++++++
.../context_awareness/_data_source_profile.ts | 101 +-----
.../discover/context_awareness/_telemetry.ts | 326 ++++++++++++++++++
.../apps/discover/context_awareness/index.ts | 1 +
17 files changed, 946 insertions(+), 337 deletions(-)
delete mode 100644 src/plugins/discover/public/services/discover_ebt_context_manager.test.ts
delete mode 100644 src/plugins/discover/public/services/discover_ebt_context_manager.ts
create mode 100644 src/plugins/discover/public/services/discover_ebt_manager.test.ts
create mode 100644 src/plugins/discover/public/services/discover_ebt_manager.ts
create mode 100644 test/functional/apps/discover/context_awareness/_telemetry.ts
diff --git a/src/plugins/discover/public/__mocks__/services.ts b/src/plugins/discover/public/__mocks__/services.ts
index 3d78239558f3e..f00d105444630 100644
--- a/src/plugins/discover/public/__mocks__/services.ts
+++ b/src/plugins/discover/public/__mocks__/services.ts
@@ -45,6 +45,7 @@ import { SearchResponse } from '@elastic/elasticsearch/lib/api/types';
import { urlTrackerMock } from './url_tracker.mock';
import { createElement } from 'react';
import { createContextAwarenessMocks } from '../context_awareness/__mocks__';
+import { DiscoverEBTManager } from '../services/discover_ebt_manager';
export function createDiscoverServicesMock(): DiscoverServices {
const dataPlugin = dataPluginMock.createStartContract();
@@ -245,6 +246,7 @@ export function createDiscoverServicesMock(): DiscoverServices {
singleDocLocator: { getRedirectUrl: jest.fn(() => '') },
urlTracker: urlTrackerMock,
profilesManager: profilesManagerMock,
+ ebtManager: new DiscoverEBTManager(),
setHeaderActionMenu: jest.fn(),
} as unknown as DiscoverServices;
}
diff --git a/src/plugins/discover/public/application/context/context_app.test.tsx b/src/plugins/discover/public/application/context/context_app.test.tsx
index 9c77d1e40bbb2..7a99194cad575 100644
--- a/src/plugins/discover/public/application/context/context_app.test.tsx
+++ b/src/plugins/discover/public/application/context/context_app.test.tsx
@@ -72,6 +72,7 @@ describe('ContextApp test', () => {
contextLocator: { getRedirectUrl: jest.fn(() => '') },
singleDocLocator: { getRedirectUrl: jest.fn(() => '') },
profilesManager: discoverServices.profilesManager,
+ ebtManager: discoverServices.ebtManager,
timefilter: discoverServices.timefilter,
uiActions: discoverServices.uiActions,
} as unknown as DiscoverServices;
diff --git a/src/plugins/discover/public/application/context/context_app.tsx b/src/plugins/discover/public/application/context/context_app.tsx
index e0dfa985b594e..b0fc1342a8f72 100644
--- a/src/plugins/discover/public/application/context/context_app.tsx
+++ b/src/plugins/discover/public/application/context/context_app.tsx
@@ -56,6 +56,8 @@ export const ContextApp = ({ dataView, anchorId, referrer }: ContextAppProps) =>
navigation,
filterManager,
core,
+ ebtManager,
+ fieldsMetadata,
} = services;
const isLegacy = useMemo(() => uiSettings.get(DOC_TABLE_LEGACY), [uiSettings]);
@@ -199,15 +201,36 @@ export const ContextApp = ({ dataView, anchorId, referrer }: ContextAppProps) =>
);
const addFilter = useCallback(
- async (field: DataViewField | string, values: unknown, operation: string) => {
+ async (field: DataViewField | string, values: unknown, operation: '+' | '-') => {
const newFilters = generateFilters(filterManager, field, values, operation, dataView);
filterManager.addFilters(newFilters);
if (dataViews) {
const fieldName = typeof field === 'string' ? field : field.name;
await popularizeField(dataView, fieldName, dataViews, capabilities);
+ void ebtManager.trackFilterAddition({
+ fieldName: fieldName === '_exists_' ? String(values) : fieldName,
+ filterOperation: fieldName === '_exists_' ? '_exists_' : operation,
+ fieldsMetadata,
+ });
}
},
- [filterManager, dataViews, dataView, capabilities]
+ [filterManager, dataViews, dataView, capabilities, ebtManager, fieldsMetadata]
+ );
+
+ const onAddColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onAddColumn(columnName);
+ void ebtManager.trackDataTableSelection({ fieldName: columnName, fieldsMetadata });
+ },
+ [onAddColumn, ebtManager, fieldsMetadata]
+ );
+
+ const onRemoveColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onRemoveColumn(columnName);
+ void ebtManager.trackDataTableRemoval({ fieldName: columnName, fieldsMetadata });
+ },
+ [onRemoveColumn, ebtManager, fieldsMetadata]
);
const TopNavMenu = navigation.ui.AggregateQueryTopNavMenu;
@@ -271,8 +294,8 @@ export const ContextApp = ({ dataView, anchorId, referrer }: ContextAppProps) =>
isLegacy={isLegacy}
columns={columns}
grid={appState.grid}
- onAddColumn={onAddColumn}
- onRemoveColumn={onRemoveColumn}
+ onAddColumn={onAddColumnWithTracking}
+ onRemoveColumn={onRemoveColumnWithTracking}
onSetColumns={onSetColumns}
predecessorCount={appState.predecessorCount}
successorCount={appState.successorCount}
diff --git a/src/plugins/discover/public/application/main/components/layout/discover_documents.tsx b/src/plugins/discover/public/application/main/components/layout/discover_documents.tsx
index 2fe2a4f5a8f93..77befc4dc334f 100644
--- a/src/plugins/discover/public/application/main/components/layout/discover_documents.tsx
+++ b/src/plugins/discover/public/application/main/components/layout/discover_documents.tsx
@@ -117,7 +117,7 @@ function DiscoverDocumentsComponent({
const services = useDiscoverServices();
const documents$ = stateContainer.dataState.data$.documents$;
const savedSearch = useSavedSearchInitial();
- const { dataViews, capabilities, uiSettings, uiActions } = services;
+ const { dataViews, capabilities, uiSettings, uiActions, ebtManager, fieldsMetadata } = services;
const [
dataSource,
query,
@@ -200,6 +200,22 @@ function DiscoverDocumentsComponent({
settings: grid,
});
+ const onAddColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onAddColumn(columnName);
+ void ebtManager.trackDataTableSelection({ fieldName: columnName, fieldsMetadata });
+ },
+ [onAddColumn, ebtManager, fieldsMetadata]
+ );
+
+ const onRemoveColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onRemoveColumn(columnName);
+ void ebtManager.trackDataTableRemoval({ fieldName: columnName, fieldsMetadata });
+ },
+ [onRemoveColumn, ebtManager, fieldsMetadata]
+ );
+
const setExpandedDoc = useCallback(
(doc: DataTableRecord | undefined) => {
stateContainer.internalState.transitions.setExpandedDoc(doc);
@@ -299,14 +315,22 @@ function DiscoverDocumentsComponent({
columnsMeta={customColumnsMeta}
savedSearchId={savedSearch.id}
onFilter={onAddFilter}
- onRemoveColumn={onRemoveColumn}
- onAddColumn={onAddColumn}
+ onRemoveColumn={onRemoveColumnWithTracking}
+ onAddColumn={onAddColumnWithTracking}
onClose={() => setExpandedDoc(undefined)}
setExpandedDoc={setExpandedDoc}
query={query}
/>
),
- [dataView, onAddColumn, onAddFilter, onRemoveColumn, query, savedSearch.id, setExpandedDoc]
+ [
+ dataView,
+ onAddColumnWithTracking,
+ onAddFilter,
+ onRemoveColumnWithTracking,
+ query,
+ savedSearch.id,
+ setExpandedDoc,
+ ]
);
const configRowHeight = uiSettings.get(ROW_HEIGHT_OPTION);
diff --git a/src/plugins/discover/public/application/main/components/layout/discover_layout.tsx b/src/plugins/discover/public/application/main/components/layout/discover_layout.tsx
index 49e645e3f2206..bc9cad72a5eb6 100644
--- a/src/plugins/discover/public/application/main/components/layout/discover_layout.tsx
+++ b/src/plugins/discover/public/application/main/components/layout/discover_layout.tsx
@@ -78,6 +78,8 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
spaces,
observabilityAIAssistant,
dataVisualizer: dataVisualizerService,
+ ebtManager,
+ fieldsMetadata,
} = useDiscoverServices();
const pageBackgroundColor = useEuiBackgroundColor('plain');
const globalQueryState = data.query.getState();
@@ -154,6 +156,22 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
settings: grid,
});
+ const onAddColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onAddColumn(columnName);
+ void ebtManager.trackDataTableSelection({ fieldName: columnName, fieldsMetadata });
+ },
+ [onAddColumn, ebtManager, fieldsMetadata]
+ );
+
+ const onRemoveColumnWithTracking = useCallback(
+ (columnName: string) => {
+ onRemoveColumn(columnName);
+ void ebtManager.trackDataTableRemoval({ fieldName: columnName, fieldsMetadata });
+ },
+ [onRemoveColumn, ebtManager, fieldsMetadata]
+ );
+
// The assistant is getting the state from the url correctly
// expect from the index pattern where we have only the dataview id
useEffect(() => {
@@ -175,9 +193,14 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
if (trackUiMetric) {
trackUiMetric(METRIC_TYPE.CLICK, 'filter_added');
}
+ void ebtManager.trackFilterAddition({
+ fieldName: fieldName === '_exists_' ? String(values) : fieldName,
+ filterOperation: fieldName === '_exists_' ? '_exists_' : operation,
+ fieldsMetadata,
+ });
return filterManager.addFilters(newFilters);
},
- [filterManager, dataView, dataViews, trackUiMetric, capabilities]
+ [filterManager, dataView, dataViews, trackUiMetric, capabilities, ebtManager, fieldsMetadata]
);
const getOperator = (fieldName: string, values: unknown, operation: '+' | '-') => {
@@ -222,8 +245,13 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
if (trackUiMetric) {
trackUiMetric(METRIC_TYPE.CLICK, 'esql_filter_added');
}
+ void ebtManager.trackFilterAddition({
+ fieldName: fieldName === '_exists_' ? String(values) : fieldName,
+ filterOperation: fieldName === '_exists_' ? '_exists_' : operation,
+ fieldsMetadata,
+ });
},
- [data.query.queryString, query, trackUiMetric]
+ [data.query.queryString, query, trackUiMetric, ebtManager, fieldsMetadata]
);
const onFilter = isEsqlMode ? onPopulateWhereClause : onAddFilter;
@@ -274,8 +302,8 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
return undefined;
}
- return () => onAddColumn(draggingFieldName);
- }, [onAddColumn, draggingFieldName, currentColumns]);
+ return () => onAddColumnWithTracking(draggingFieldName);
+ }, [onAddColumnWithTracking, draggingFieldName, currentColumns]);
const [sidebarToggleState$] = useState>(
() => new BehaviorSubject({ isCollapsed: false, toggle: () => {} })
@@ -396,10 +424,10 @@ export function DiscoverLayout({ stateContainer }: DiscoverLayoutProps) {
sidebarPanel={
{
const { usageCollection } = plugins;
@@ -223,7 +223,7 @@ export const buildServices = memoize(
noDataPage: plugins.noDataPage,
observabilityAIAssistant: plugins.observabilityAIAssistant,
profilesManager,
- ebtContextManager,
+ ebtManager,
fieldsMetadata: plugins.fieldsMetadata,
logsDataAccess: plugins.logsDataAccess,
};
diff --git a/src/plugins/discover/public/context_awareness/__mocks__/index.tsx b/src/plugins/discover/public/context_awareness/__mocks__/index.tsx
index a15b7aa26a8a0..153d401cc980a 100644
--- a/src/plugins/discover/public/context_awareness/__mocks__/index.tsx
+++ b/src/plugins/discover/public/context_awareness/__mocks__/index.tsx
@@ -23,7 +23,7 @@ import {
} from '../profiles';
import { ProfileProviderServices } from '../profile_providers/profile_provider_services';
import { ProfilesManager } from '../profiles_manager';
-import { DiscoverEBTContextManager } from '../../services/discover_ebt_context_manager';
+import { DiscoverEBTManager } from '../../services/discover_ebt_manager';
import { createLogsContextServiceMock } from '@kbn/discover-utils/src/__mocks__';
export const createContextAwarenessMocks = ({
@@ -152,12 +152,12 @@ export const createContextAwarenessMocks = ({
documentProfileServiceMock.registerProvider(documentProfileProviderMock);
}
- const ebtContextManagerMock = new DiscoverEBTContextManager();
+ const ebtManagerMock = new DiscoverEBTManager();
const profilesManagerMock = new ProfilesManager(
rootProfileServiceMock,
dataSourceProfileServiceMock,
documentProfileServiceMock,
- ebtContextManagerMock
+ ebtManagerMock
);
const profileProviderServices = createProfileProviderServicesMock();
@@ -173,7 +173,7 @@ export const createContextAwarenessMocks = ({
contextRecordMock2,
profilesManagerMock,
profileProviderServices,
- ebtContextManagerMock,
+ ebtManagerMock,
};
};
diff --git a/src/plugins/discover/public/context_awareness/profiles_manager.test.ts b/src/plugins/discover/public/context_awareness/profiles_manager.test.ts
index 87965edbe7488..da5ad8b56dcf3 100644
--- a/src/plugins/discover/public/context_awareness/profiles_manager.test.ts
+++ b/src/plugins/discover/public/context_awareness/profiles_manager.test.ts
@@ -21,7 +21,7 @@ describe('ProfilesManager', () => {
beforeEach(() => {
jest.clearAllMocks();
mocks = createContextAwarenessMocks();
- jest.spyOn(mocks.ebtContextManagerMock, 'updateProfilesContextWith');
+ jest.spyOn(mocks.ebtManagerMock, 'updateProfilesContextWith');
});
it('should return default profiles', () => {
@@ -62,7 +62,7 @@ describe('ProfilesManager', () => {
mocks.documentProfileProviderMock.profile,
]);
- expect(mocks.ebtContextManagerMock.updateProfilesContextWith).toHaveBeenCalledWith([
+ expect(mocks.ebtManagerMock.updateProfilesContextWith).toHaveBeenCalledWith([
'root-profile',
'data-source-profile',
]);
diff --git a/src/plugins/discover/public/context_awareness/profiles_manager.ts b/src/plugins/discover/public/context_awareness/profiles_manager.ts
index 2c8b1c7d16cb0..6b7bef5e02294 100644
--- a/src/plugins/discover/public/context_awareness/profiles_manager.ts
+++ b/src/plugins/discover/public/context_awareness/profiles_manager.ts
@@ -25,7 +25,7 @@ import type {
DocumentContext,
} from './profiles';
import type { ContextWithProfileId } from './profile_service';
-import { DiscoverEBTContextManager } from '../services/discover_ebt_context_manager';
+import { DiscoverEBTManager } from '../services/discover_ebt_manager';
interface SerializedRootProfileParams {
solutionNavId: RootProfileProviderParams['solutionNavId'];
@@ -53,7 +53,7 @@ export interface GetProfilesOptions {
export class ProfilesManager {
private readonly rootContext$: BehaviorSubject>;
private readonly dataSourceContext$: BehaviorSubject>;
- private readonly ebtContextManager: DiscoverEBTContextManager;
+ private readonly ebtManager: DiscoverEBTManager;
private prevRootProfileParams?: SerializedRootProfileParams;
private prevDataSourceProfileParams?: SerializedDataSourceProfileParams;
@@ -64,11 +64,11 @@ export class ProfilesManager {
private readonly rootProfileService: RootProfileService,
private readonly dataSourceProfileService: DataSourceProfileService,
private readonly documentProfileService: DocumentProfileService,
- ebtContextManager: DiscoverEBTContextManager
+ ebtManager: DiscoverEBTManager
) {
this.rootContext$ = new BehaviorSubject(rootProfileService.defaultContext);
this.dataSourceContext$ = new BehaviorSubject(dataSourceProfileService.defaultContext);
- this.ebtContextManager = ebtContextManager;
+ this.ebtManager = ebtManager;
}
/**
@@ -206,7 +206,7 @@ export class ProfilesManager {
private trackActiveProfiles(rootContextProfileId: string, dataSourceContextProfileId: string) {
const dscProfiles = [rootContextProfileId, dataSourceContextProfileId];
- this.ebtContextManager.updateProfilesContextWith(dscProfiles);
+ this.ebtManager.updateProfilesContextWith(dscProfiles);
}
}
diff --git a/src/plugins/discover/public/plugin.tsx b/src/plugins/discover/public/plugin.tsx
index e6430f82c62fe..dbbcc90a7d451 100644
--- a/src/plugins/discover/public/plugin.tsx
+++ b/src/plugins/discover/public/plugin.tsx
@@ -59,7 +59,7 @@ import { RootProfileService } from './context_awareness/profiles/root_profile';
import { DataSourceProfileService } from './context_awareness/profiles/data_source_profile';
import { DocumentProfileService } from './context_awareness/profiles/document_profile';
import { ProfilesManager } from './context_awareness/profiles_manager';
-import { DiscoverEBTContextManager } from './services/discover_ebt_context_manager';
+import { DiscoverEBTManager } from './services/discover_ebt_manager';
/**
* Contains Discover, one of the oldest parts of Kibana
@@ -149,8 +149,12 @@ export class DiscoverPlugin
this.urlTracker = { setTrackedUrl, restorePreviousUrl, setTrackingEnabled };
this.stopUrlTracking = stopUrlTracker;
- const ebtContextManager = new DiscoverEBTContextManager();
- ebtContextManager.initialize({ core });
+ const ebtManager = new DiscoverEBTManager();
+ ebtManager.initialize({
+ core,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: true,
+ });
core.application.register({
id: PLUGIN_ID,
@@ -176,7 +180,7 @@ export class DiscoverPlugin
window.dispatchEvent(new HashChangeEvent('hashchange'));
});
- ebtContextManager.enable();
+ ebtManager.enableContext();
const services = buildServices({
core: coreStart,
@@ -188,12 +192,12 @@ export class DiscoverPlugin
history: this.historyService.getHistory(),
scopedHistory: this.scopedHistory,
urlTracker: this.urlTracker!,
- profilesManager: await this.createProfilesManager(
- coreStart,
- discoverStartPlugins,
- ebtContextManager
- ),
- ebtContextManager,
+ profilesManager: await this.createProfilesManager({
+ core: coreStart,
+ plugins: discoverStartPlugins,
+ ebtManager,
+ }),
+ ebtManager,
setHeaderActionMenu: params.setHeaderActionMenu,
});
@@ -226,7 +230,7 @@ export class DiscoverPlugin
});
return () => {
- ebtContextManager.disableAndReset();
+ ebtManager.disableAndResetContext();
unlistenParentHistory();
unmount();
appUnMounted();
@@ -296,11 +300,12 @@ export class DiscoverPlugin
}
const getDiscoverServicesInternal = () => {
+ const ebtManager = new DiscoverEBTManager(); // It is not initialized outside of Discover
return this.getDiscoverServices(
core,
plugins,
- this.createEmptyProfilesManager(),
- new DiscoverEBTContextManager() // it's not enabled outside of Discover
+ this.createEmptyProfilesManager({ ebtManager }),
+ ebtManager
);
};
@@ -326,11 +331,15 @@ export class DiscoverPlugin
return { rootProfileService, dataSourceProfileService, documentProfileService };
}
- private createProfilesManager = async (
- core: CoreStart,
- plugins: DiscoverStartPlugins,
- ebtContextManager: DiscoverEBTContextManager
- ) => {
+ private async createProfilesManager({
+ core,
+ plugins,
+ ebtManager,
+ }: {
+ core: CoreStart;
+ plugins: DiscoverStartPlugins;
+ ebtManager: DiscoverEBTManager;
+ }) {
const { registerProfileProviders } = await import('./context_awareness/profile_providers');
const { rootProfileService, dataSourceProfileService, documentProfileService } =
this.createProfileServices();
@@ -341,7 +350,7 @@ export class DiscoverPlugin
rootProfileService,
dataSourceProfileService,
documentProfileService,
- ebtContextManager
+ ebtManager
);
await registerProfileProviders({
@@ -349,21 +358,18 @@ export class DiscoverPlugin
dataSourceProfileService,
documentProfileService,
enabledExperimentalProfileIds,
- services: this.getDiscoverServices(core, plugins, profilesManager, ebtContextManager),
+ services: this.getDiscoverServices(core, plugins, profilesManager, ebtManager),
});
return profilesManager;
- };
-
- private createEmptyProfilesManager() {
- const { rootProfileService, dataSourceProfileService, documentProfileService } =
- this.createProfileServices();
+ }
+ private createEmptyProfilesManager({ ebtManager }: { ebtManager: DiscoverEBTManager }) {
return new ProfilesManager(
- rootProfileService,
- dataSourceProfileService,
- documentProfileService,
- new DiscoverEBTContextManager() // it's not enabled outside of Discover
+ new RootProfileService(),
+ new DataSourceProfileService(),
+ new DocumentProfileService(),
+ ebtManager
);
}
@@ -371,7 +377,7 @@ export class DiscoverPlugin
core: CoreStart,
plugins: DiscoverStartPlugins,
profilesManager: ProfilesManager,
- ebtContextManager: DiscoverEBTContextManager
+ ebtManager: DiscoverEBTManager
) => {
return buildServices({
core,
@@ -383,11 +389,13 @@ export class DiscoverPlugin
history: this.historyService.getHistory(),
urlTracker: this.urlTracker!,
profilesManager,
- ebtContextManager,
+ ebtManager,
});
};
private registerEmbeddable(core: CoreSetup, plugins: DiscoverSetupPlugins) {
+ const ebtManager = new DiscoverEBTManager(); // It is not initialized outside of Discover
+
const getStartServices = async () => {
const [coreStart, deps] = await core.getStartServices();
return {
@@ -396,16 +404,20 @@ export class DiscoverPlugin
};
};
- const getDiscoverServicesInternal = async () => {
+ const getDiscoverServicesForEmbeddable = async () => {
const [coreStart, deps] = await core.getStartServices();
- const ebtContextManager = new DiscoverEBTContextManager(); // it's not enabled outside of Discover
- const profilesManager = await this.createProfilesManager(coreStart, deps, ebtContextManager);
- return this.getDiscoverServices(coreStart, deps, profilesManager, ebtContextManager);
+
+ const profilesManager = await this.createProfilesManager({
+ core: coreStart,
+ plugins: deps,
+ ebtManager,
+ });
+ return this.getDiscoverServices(coreStart, deps, profilesManager, ebtManager);
};
plugins.embeddable.registerReactEmbeddableSavedObject({
onAdd: async (container, savedObject) => {
- const services = await getDiscoverServicesInternal();
+ const services = await getDiscoverServicesForEmbeddable();
const initialState = await deserializeState({
serializedState: {
rawState: { savedObjectId: savedObject.id },
@@ -429,7 +441,7 @@ export class DiscoverPlugin
plugins.embeddable.registerReactEmbeddableFactory(SEARCH_EMBEDDABLE_TYPE, async () => {
const [startServices, discoverServices, { getSearchEmbeddableFactory }] = await Promise.all([
getStartServices(),
- getDiscoverServicesInternal(),
+ getDiscoverServicesForEmbeddable(),
import('./embeddable/get_search_embeddable_factory'),
]);
diff --git a/src/plugins/discover/public/services/discover_ebt_context_manager.test.ts b/src/plugins/discover/public/services/discover_ebt_context_manager.test.ts
deleted file mode 100644
index 3b2836325b671..0000000000000
--- a/src/plugins/discover/public/services/discover_ebt_context_manager.test.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-import { BehaviorSubject } from 'rxjs';
-import { coreMock } from '@kbn/core/public/mocks';
-import { DiscoverEBTContextManager } from './discover_ebt_context_manager';
-
-const coreSetupMock = coreMock.createSetup();
-
-describe('DiscoverEBTContextManager', () => {
- let discoverEBTContextManager: DiscoverEBTContextManager;
-
- beforeEach(() => {
- discoverEBTContextManager = new DiscoverEBTContextManager();
- });
-
- describe('register', () => {
- it('should register the context provider', () => {
- discoverEBTContextManager.initialize({ core: coreSetupMock });
-
- expect(coreSetupMock.analytics.registerContextProvider).toHaveBeenCalledWith({
- name: 'discover_context',
- context$: expect.any(BehaviorSubject),
- schema: {
- discoverProfiles: {
- type: 'array',
- items: {
- type: 'keyword',
- _meta: {
- description: 'List of active Discover context awareness profiles',
- },
- },
- },
- },
- });
- });
- });
-
- describe('updateProfilesWith', () => {
- it('should update the profiles with the provided props', () => {
- const dscProfiles = ['profile1', 'profile2'];
- const dscProfiles2 = ['profile21', 'profile22'];
- discoverEBTContextManager.initialize({ core: coreSetupMock });
- discoverEBTContextManager.enable();
-
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
-
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles2);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles2);
- });
-
- it('should not update the profiles if profile list did not change', () => {
- const dscProfiles = ['profile1', 'profile2'];
- const dscProfiles2 = ['profile1', 'profile2'];
- discoverEBTContextManager.initialize({ core: coreSetupMock });
- discoverEBTContextManager.enable();
-
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
-
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles2);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
- });
-
- it('should not update the profiles if not enabled yet', () => {
- const dscProfiles = ['profile1', 'profile2'];
- discoverEBTContextManager.initialize({ core: coreSetupMock });
-
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
- });
-
- it('should not update the profiles after resetting unless enabled again', () => {
- const dscProfiles = ['profile1', 'profile2'];
- discoverEBTContextManager.initialize({ core: coreSetupMock });
- discoverEBTContextManager.enable();
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
- discoverEBTContextManager.disableAndReset();
- expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
- discoverEBTContextManager.enable();
- discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
- expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
- });
- });
-});
diff --git a/src/plugins/discover/public/services/discover_ebt_context_manager.ts b/src/plugins/discover/public/services/discover_ebt_context_manager.ts
deleted file mode 100644
index 12ea918c495d9..0000000000000
--- a/src/plugins/discover/public/services/discover_ebt_context_manager.ts
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-import { BehaviorSubject } from 'rxjs';
-import { isEqual } from 'lodash';
-import type { CoreSetup } from '@kbn/core-lifecycle-browser';
-
-export interface DiscoverEBTContextProps {
- discoverProfiles: string[]; // Discover Context Awareness Profiles
-}
-export type DiscoverEBTContext = BehaviorSubject;
-
-export class DiscoverEBTContextManager {
- private isEnabled: boolean = false;
- private ebtContext$: DiscoverEBTContext | undefined;
-
- constructor() {}
-
- // https://docs.elastic.dev/telemetry/collection/event-based-telemetry
- public initialize({ core }: { core: CoreSetup }) {
- const context$ = new BehaviorSubject({
- discoverProfiles: [],
- });
-
- core.analytics.registerContextProvider({
- name: 'discover_context',
- context$,
- schema: {
- discoverProfiles: {
- type: 'array',
- items: {
- type: 'keyword',
- _meta: {
- description: 'List of active Discover context awareness profiles',
- },
- },
- },
- // If we decide to extend EBT context with more properties, we can do it here
- },
- });
-
- this.ebtContext$ = context$;
- }
-
- public enable() {
- this.isEnabled = true;
- }
-
- public updateProfilesContextWith(discoverProfiles: DiscoverEBTContextProps['discoverProfiles']) {
- if (
- this.isEnabled &&
- this.ebtContext$ &&
- !isEqual(this.ebtContext$.getValue().discoverProfiles, discoverProfiles)
- ) {
- this.ebtContext$.next({
- discoverProfiles,
- });
- }
- }
-
- public getProfilesContext() {
- return this.ebtContext$?.getValue()?.discoverProfiles;
- }
-
- public disableAndReset() {
- this.updateProfilesContextWith([]);
- this.isEnabled = false;
- }
-}
diff --git a/src/plugins/discover/public/services/discover_ebt_manager.test.ts b/src/plugins/discover/public/services/discover_ebt_manager.test.ts
new file mode 100644
index 0000000000000..0ed20dacdb0ce
--- /dev/null
+++ b/src/plugins/discover/public/services/discover_ebt_manager.test.ts
@@ -0,0 +1,242 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+import { BehaviorSubject } from 'rxjs';
+import { coreMock } from '@kbn/core/public/mocks';
+import { DiscoverEBTManager } from './discover_ebt_manager';
+import { FieldsMetadataPublicStart } from '@kbn/fields-metadata-plugin/public';
+
+describe('DiscoverEBTManager', () => {
+ let discoverEBTContextManager: DiscoverEBTManager;
+
+ const coreSetupMock = coreMock.createSetup();
+
+ const fieldsMetadata = {
+ getClient: jest.fn().mockResolvedValue({
+ find: jest.fn().mockResolvedValue({
+ fields: {
+ test: {
+ short: 'test',
+ },
+ },
+ }),
+ }),
+ } as unknown as FieldsMetadataPublicStart;
+
+ beforeEach(() => {
+ discoverEBTContextManager = new DiscoverEBTManager();
+ });
+
+ describe('register', () => {
+ it('should register the context provider and custom events', () => {
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: true,
+ });
+
+ expect(coreSetupMock.analytics.registerContextProvider).toHaveBeenCalledWith({
+ name: 'discover_context',
+ context$: expect.any(BehaviorSubject),
+ schema: {
+ discoverProfiles: {
+ type: 'array',
+ items: {
+ type: 'keyword',
+ _meta: {
+ description: 'List of active Discover context awareness profiles',
+ },
+ },
+ },
+ },
+ });
+
+ expect(coreSetupMock.analytics.registerEventType).toHaveBeenCalledWith({
+ eventType: 'discover_field_usage',
+ schema: {
+ eventName: {
+ type: 'keyword',
+ _meta: {
+ description:
+ 'The name of the event that is tracked in the metrics i.e. dataTableSelection, dataTableRemoval',
+ },
+ },
+ fieldName: {
+ type: 'keyword',
+ _meta: {
+ description: "Field name if it's a part of ECS schema",
+ optional: true,
+ },
+ },
+ filterOperation: {
+ type: 'keyword',
+ _meta: {
+ description: "Operation type when a filter is added i.e. '+', '-', '_exists_'",
+ optional: true,
+ },
+ },
+ },
+ });
+ });
+ });
+
+ describe('updateProfilesWith', () => {
+ it('should update the profiles with the provided props', () => {
+ const dscProfiles = ['profile1', 'profile2'];
+ const dscProfiles2 = ['profile21', 'profile22'];
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: false,
+ });
+ discoverEBTContextManager.enableContext();
+
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
+
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles2);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles2);
+ });
+
+ it('should not update the profiles if profile list did not change', () => {
+ const dscProfiles = ['profile1', 'profile2'];
+ const dscProfiles2 = ['profile1', 'profile2'];
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: false,
+ });
+ discoverEBTContextManager.enableContext();
+
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
+
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles2);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
+ });
+
+ it('should not update the profiles if not enabled yet', () => {
+ const dscProfiles = ['profile1', 'profile2'];
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: false,
+ });
+
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
+ });
+
+ it('should not update the profiles after resetting unless enabled again', () => {
+ const dscProfiles = ['profile1', 'profile2'];
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: true,
+ shouldInitializeCustomEvents: false,
+ });
+ discoverEBTContextManager.enableContext();
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
+ discoverEBTContextManager.disableAndResetContext();
+ expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toEqual([]);
+ discoverEBTContextManager.enableContext();
+ discoverEBTContextManager.updateProfilesContextWith(dscProfiles);
+ expect(discoverEBTContextManager.getProfilesContext()).toBe(dscProfiles);
+ });
+ });
+
+ describe('trackFieldUsageEvent', () => {
+ it('should track the field usage when a field is added to the table', async () => {
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: false,
+ shouldInitializeCustomEvents: true,
+ });
+
+ await discoverEBTContextManager.trackDataTableSelection({
+ fieldName: 'test',
+ fieldsMetadata,
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenCalledWith('discover_field_usage', {
+ eventName: 'dataTableSelection',
+ fieldName: 'test',
+ });
+
+ await discoverEBTContextManager.trackDataTableSelection({
+ fieldName: 'test2',
+ fieldsMetadata,
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenLastCalledWith('discover_field_usage', {
+ eventName: 'dataTableSelection', // non-ECS fields would not be included in properties
+ });
+ });
+
+ it('should track the field usage when a field is removed from the table', async () => {
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: false,
+ shouldInitializeCustomEvents: true,
+ });
+
+ await discoverEBTContextManager.trackDataTableRemoval({
+ fieldName: 'test',
+ fieldsMetadata,
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenCalledWith('discover_field_usage', {
+ eventName: 'dataTableRemoval',
+ fieldName: 'test',
+ });
+
+ await discoverEBTContextManager.trackDataTableRemoval({
+ fieldName: 'test2',
+ fieldsMetadata,
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenLastCalledWith('discover_field_usage', {
+ eventName: 'dataTableRemoval', // non-ECS fields would not be included in properties
+ });
+ });
+
+ it('should track the field usage when a filter is created', async () => {
+ discoverEBTContextManager.initialize({
+ core: coreSetupMock,
+ shouldInitializeCustomContext: false,
+ shouldInitializeCustomEvents: true,
+ });
+
+ await discoverEBTContextManager.trackFilterAddition({
+ fieldName: 'test',
+ fieldsMetadata,
+ filterOperation: '+',
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenCalledWith('discover_field_usage', {
+ eventName: 'filterAddition',
+ fieldName: 'test',
+ filterOperation: '+',
+ });
+
+ await discoverEBTContextManager.trackFilterAddition({
+ fieldName: 'test2',
+ fieldsMetadata,
+ filterOperation: '_exists_',
+ });
+
+ expect(coreSetupMock.analytics.reportEvent).toHaveBeenLastCalledWith('discover_field_usage', {
+ eventName: 'filterAddition', // non-ECS fields would not be included in properties
+ filterOperation: '_exists_',
+ });
+ });
+ });
+});
diff --git a/src/plugins/discover/public/services/discover_ebt_manager.ts b/src/plugins/discover/public/services/discover_ebt_manager.ts
new file mode 100644
index 0000000000000..420eb6c244444
--- /dev/null
+++ b/src/plugins/discover/public/services/discover_ebt_manager.ts
@@ -0,0 +1,219 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+import { BehaviorSubject } from 'rxjs';
+import { isEqual } from 'lodash';
+import type { CoreSetup } from '@kbn/core-lifecycle-browser';
+import type { FieldsMetadataPublicStart } from '@kbn/fields-metadata-plugin/public';
+
+const FIELD_USAGE_EVENT_TYPE = 'discover_field_usage';
+const FIELD_USAGE_EVENT_NAME = 'eventName';
+const FIELD_USAGE_FIELD_NAME = 'fieldName';
+const FIELD_USAGE_FILTER_OPERATION = 'filterOperation';
+
+type FilterOperation = '+' | '-' | '_exists_';
+
+export enum FieldUsageEventName {
+ dataTableSelection = 'dataTableSelection',
+ dataTableRemoval = 'dataTableRemoval',
+ filterAddition = 'filterAddition',
+}
+interface FieldUsageEventData {
+ [FIELD_USAGE_EVENT_NAME]: FieldUsageEventName;
+ [FIELD_USAGE_FIELD_NAME]?: string;
+ [FIELD_USAGE_FILTER_OPERATION]?: FilterOperation;
+}
+
+export interface DiscoverEBTContextProps {
+ discoverProfiles: string[]; // Discover Context Awareness Profiles
+}
+export type DiscoverEBTContext = BehaviorSubject<DiscoverEBTContextProps>;
+
+export class DiscoverEBTManager {
+ private isCustomContextEnabled: boolean = false;
+ private customContext$: DiscoverEBTContext | undefined;
+ private reportEvent: CoreSetup['analytics']['reportEvent'] | undefined;
+
+ constructor() {}
+
+ // https://docs.elastic.dev/telemetry/collection/event-based-telemetry
+ public initialize({
+ core,
+ shouldInitializeCustomContext,
+ shouldInitializeCustomEvents,
+ }: {
+ core: CoreSetup;
+ shouldInitializeCustomContext: boolean;
+ shouldInitializeCustomEvents: boolean;
+ }) {
+ if (shouldInitializeCustomContext) {
+ // Register Discover specific context to be used in EBT
+ const context$ = new BehaviorSubject<DiscoverEBTContextProps>({
+ discoverProfiles: [],
+ });
+ core.analytics.registerContextProvider({
+ name: 'discover_context',
+ context$,
+ schema: {
+ discoverProfiles: {
+ type: 'array',
+ items: {
+ type: 'keyword',
+ _meta: {
+ description: 'List of active Discover context awareness profiles',
+ },
+ },
+ },
+ // If we decide to extend EBT context with more properties, we can do it here
+ },
+ });
+ this.customContext$ = context$;
+ }
+
+ if (shouldInitializeCustomEvents) {
+ // Register Discover events to be used with EBT
+ core.analytics.registerEventType({
+ eventType: FIELD_USAGE_EVENT_TYPE,
+ schema: {
+ [FIELD_USAGE_EVENT_NAME]: {
+ type: 'keyword',
+ _meta: {
+ description:
+ 'The name of the event that is tracked in the metrics i.e. dataTableSelection, dataTableRemoval',
+ },
+ },
+ [FIELD_USAGE_FIELD_NAME]: {
+ type: 'keyword',
+ _meta: {
+ description: "Field name if it's a part of ECS schema",
+ optional: true,
+ },
+ },
+ [FIELD_USAGE_FILTER_OPERATION]: {
+ type: 'keyword',
+ _meta: {
+ description: "Operation type when a filter is added i.e. '+', '-', '_exists_'",
+ optional: true,
+ },
+ },
+ },
+ });
+ this.reportEvent = core.analytics.reportEvent;
+ }
+ }
+
+ public enableContext() {
+ this.isCustomContextEnabled = true;
+ }
+
+ public disableAndResetContext() {
+ this.updateProfilesContextWith([]);
+ this.isCustomContextEnabled = false;
+ }
+
+ public updateProfilesContextWith(discoverProfiles: DiscoverEBTContextProps['discoverProfiles']) {
+ if (
+ this.isCustomContextEnabled &&
+ this.customContext$ &&
+ !isEqual(this.customContext$.getValue().discoverProfiles, discoverProfiles)
+ ) {
+ this.customContext$.next({
+ discoverProfiles,
+ });
+ }
+ }
+
+ public getProfilesContext() {
+ return this.customContext$?.getValue()?.discoverProfiles;
+ }
+
+ private async trackFieldUsageEvent({
+ eventName,
+ fieldName,
+ filterOperation,
+ fieldsMetadata,
+ }: {
+ eventName: FieldUsageEventName;
+ fieldName: string;
+ filterOperation?: FilterOperation;
+ fieldsMetadata: FieldsMetadataPublicStart | undefined;
+ }) {
+ if (!this.reportEvent) {
+ return;
+ }
+
+ const eventData: FieldUsageEventData = {
+ [FIELD_USAGE_EVENT_NAME]: eventName,
+ };
+
+ if (fieldsMetadata) {
+ const client = await fieldsMetadata.getClient();
+ const { fields } = await client.find({
+ attributes: ['short'],
+ fieldNames: [fieldName],
+ });
+
+ // excludes non ECS fields
+ if (fields[fieldName]?.short) {
+ eventData[FIELD_USAGE_FIELD_NAME] = fieldName;
+ }
+ }
+
+ if (filterOperation) {
+ eventData[FIELD_USAGE_FILTER_OPERATION] = filterOperation;
+ }
+
+ this.reportEvent(FIELD_USAGE_EVENT_TYPE, eventData);
+ }
+
+ public async trackDataTableSelection({
+ fieldName,
+ fieldsMetadata,
+ }: {
+ fieldName: string;
+ fieldsMetadata: FieldsMetadataPublicStart | undefined;
+ }) {
+ await this.trackFieldUsageEvent({
+ eventName: FieldUsageEventName.dataTableSelection,
+ fieldName,
+ fieldsMetadata,
+ });
+ }
+
+ public async trackDataTableRemoval({
+ fieldName,
+ fieldsMetadata,
+ }: {
+ fieldName: string;
+ fieldsMetadata: FieldsMetadataPublicStart | undefined;
+ }) {
+ await this.trackFieldUsageEvent({
+ eventName: FieldUsageEventName.dataTableRemoval,
+ fieldName,
+ fieldsMetadata,
+ });
+ }
+
+ public async trackFilterAddition({
+ fieldName,
+ fieldsMetadata,
+ filterOperation,
+ }: {
+ fieldName: string;
+ fieldsMetadata: FieldsMetadataPublicStart | undefined;
+ filterOperation: FilterOperation;
+ }) {
+ await this.trackFieldUsageEvent({
+ eventName: FieldUsageEventName.filterAddition,
+ fieldName,
+ fieldsMetadata,
+ filterOperation,
+ });
+ }
+}
diff --git a/test/functional/apps/discover/context_awareness/_data_source_profile.ts b/test/functional/apps/discover/context_awareness/_data_source_profile.ts
index ecf4b2fb29c4c..35e3552afa655 100644
--- a/test/functional/apps/discover/context_awareness/_data_source_profile.ts
+++ b/test/functional/apps/discover/context_awareness/_data_source_profile.ts
@@ -12,115 +12,16 @@ import expect from '@kbn/expect';
import type { FtrProviderContext } from '../ftr_provider_context';
export default function ({ getService, getPageObjects }: FtrProviderContext) {
- const { common, discover, unifiedFieldList, dashboard, header, timePicker } = getPageObjects([
+ const { common, discover, unifiedFieldList } = getPageObjects([
'common',
'discover',
'unifiedFieldList',
- 'dashboard',
- 'header',
- 'timePicker',
]);
const testSubjects = getService('testSubjects');
const dataViews = getService('dataViews');
const dataGrid = getService('dataGrid');
- const monacoEditor = getService('monacoEditor');
- const ebtUIHelper = getService('kibana_ebt_ui');
- const retry = getService('retry');
- const esArchiver = getService('esArchiver');
- const kibanaServer = getService('kibanaServer');
- const dashboardAddPanel = getService('dashboardAddPanel');
describe('data source profile', () => {
- describe('telemetry', () => {
- before(async () => {
- await esArchiver.loadIfNeeded('test/functional/fixtures/es_archiver/logstash_functional');
- await kibanaServer.importExport.load('test/functional/fixtures/kbn_archiver/discover');
- });
-
- after(async () => {
- await kibanaServer.importExport.unload('test/functional/fixtures/kbn_archiver/discover');
- });
-
- it('should set EBT context for telemetry events with default profile', async () => {
- await common.navigateToApp('discover');
- await discover.selectTextBaseLang();
- await discover.waitUntilSearchingHasFinished();
- await monacoEditor.setCodeEditorValue('from my-example-* | sort @timestamp desc');
- await ebtUIHelper.setOptIn(true);
- await testSubjects.click('querySubmitButton');
- await discover.waitUntilSearchingHasFinished();
-
- const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
- eventTypes: ['performance_metric'],
- withTimeoutMs: 500,
- });
-
- expect(events[events.length - 1].context.discoverProfiles).to.eql([
- 'example-root-profile',
- 'default-data-source-profile',
- ]);
- });
-
- it('should set EBT context for telemetry events when example profile and reset', async () => {
- await common.navigateToApp('discover');
- await discover.selectTextBaseLang();
- await discover.waitUntilSearchingHasFinished();
- await monacoEditor.setCodeEditorValue('from my-example-logs | sort @timestamp desc');
- await ebtUIHelper.setOptIn(true);
- await testSubjects.click('querySubmitButton');
- await discover.waitUntilSearchingHasFinished();
-
- const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
- eventTypes: ['performance_metric'],
- withTimeoutMs: 500,
- });
-
- expect(events[events.length - 1].context.discoverProfiles).to.eql([
- 'example-root-profile',
- 'example-data-source-profile',
- ]);
-
- // should reset the profiles when navigating away from Discover
- await testSubjects.click('logo');
- await retry.waitFor('home page to open', async () => {
- return (await testSubjects.getVisibleText('euiBreadcrumb')) === 'Home';
- });
- await testSubjects.click('addSampleData');
-
- await retry.try(async () => {
- const eventsAfter = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
- eventTypes: ['click'],
- withTimeoutMs: 500,
- });
-
- expect(eventsAfter[eventsAfter.length - 1].context.discoverProfiles).to.eql([]);
- });
- });
-
- it('should not set EBT context for embeddables', async () => {
- await dashboard.navigateToApp();
- await dashboard.gotoDashboardLandingPage();
- await dashboard.clickNewDashboard();
- await timePicker.setDefaultAbsoluteRange();
- await ebtUIHelper.setOptIn(true);
- await dashboardAddPanel.addSavedSearch('A Saved Search');
- await header.waitUntilLoadingHasFinished();
- await dashboard.waitForRenderComplete();
- const rows = await dataGrid.getDocTableRows();
- expect(rows.length).to.be.above(0);
- await testSubjects.click('dashboardEditorMenuButton');
-
- const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
- eventTypes: ['click'],
- withTimeoutMs: 500,
- });
-
- expect(
- events.every((event) => !(event.context.discoverProfiles as string[])?.length)
- ).to.be(true);
- });
- });
-
describe('ES|QL mode', () => {
describe('cell renderers', () => {
it('should render custom @timestamp but not custom log.level', async () => {
diff --git a/test/functional/apps/discover/context_awareness/_telemetry.ts b/test/functional/apps/discover/context_awareness/_telemetry.ts
new file mode 100644
index 0000000000000..587de698f9336
--- /dev/null
+++ b/test/functional/apps/discover/context_awareness/_telemetry.ts
@@ -0,0 +1,326 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+import expect from '@kbn/expect';
+import type { FtrProviderContext } from '../ftr_provider_context';
+
+export default function ({ getService, getPageObjects }: FtrProviderContext) {
+ const { common, discover, unifiedFieldList, dashboard, header, timePicker } = getPageObjects([
+ 'common',
+ 'discover',
+ 'unifiedFieldList',
+ 'dashboard',
+ 'header',
+ 'timePicker',
+ ]);
+ const testSubjects = getService('testSubjects');
+ const dataGrid = getService('dataGrid');
+ const dataViews = getService('dataViews');
+ const monacoEditor = getService('monacoEditor');
+ const ebtUIHelper = getService('kibana_ebt_ui');
+ const retry = getService('retry');
+ const esArchiver = getService('esArchiver');
+ const kibanaServer = getService('kibanaServer');
+ const dashboardAddPanel = getService('dashboardAddPanel');
+
+ describe('telemetry', () => {
+ describe('context', () => {
+ before(async () => {
+ await esArchiver.loadIfNeeded('test/functional/fixtures/es_archiver/logstash_functional');
+ await kibanaServer.importExport.load('test/functional/fixtures/kbn_archiver/discover');
+ });
+
+ after(async () => {
+ await kibanaServer.importExport.unload('test/functional/fixtures/kbn_archiver/discover');
+ });
+
+ it('should set EBT context for telemetry events with default profile', async () => {
+ await common.navigateToApp('discover');
+ await discover.selectTextBaseLang();
+ await discover.waitUntilSearchingHasFinished();
+ await monacoEditor.setCodeEditorValue('from my-example-* | sort @timestamp desc');
+ await ebtUIHelper.setOptIn(true);
+ await testSubjects.click('querySubmitButton');
+ await discover.waitUntilSearchingHasFinished();
+
+ const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['performance_metric'],
+ withTimeoutMs: 500,
+ });
+
+ expect(events[events.length - 1].context.discoverProfiles).to.eql([
+ 'example-root-profile',
+ 'default-data-source-profile',
+ ]);
+ });
+
+ it('should set EBT context for telemetry events when example profile and reset', async () => {
+ await common.navigateToApp('discover');
+ await discover.selectTextBaseLang();
+ await discover.waitUntilSearchingHasFinished();
+ await monacoEditor.setCodeEditorValue('from my-example-logs | sort @timestamp desc');
+ await ebtUIHelper.setOptIn(true);
+ await testSubjects.click('querySubmitButton');
+ await discover.waitUntilSearchingHasFinished();
+
+ const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['performance_metric'],
+ withTimeoutMs: 500,
+ });
+
+ expect(events[events.length - 1].context.discoverProfiles).to.eql([
+ 'example-root-profile',
+ 'example-data-source-profile',
+ ]);
+
+ // should reset the profiles when navigating away from Discover
+ await testSubjects.click('logo');
+ await retry.waitFor('home page to open', async () => {
+ return (await testSubjects.getVisibleText('euiBreadcrumb')) === 'Home';
+ });
+ await testSubjects.click('addSampleData');
+
+ await retry.try(async () => {
+ const eventsAfter = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['click'],
+ withTimeoutMs: 500,
+ });
+
+ expect(eventsAfter[eventsAfter.length - 1].context.discoverProfiles).to.eql([]);
+ });
+ });
+
+ it('should not set EBT context for embeddables', async () => {
+ await dashboard.navigateToApp();
+ await dashboard.gotoDashboardLandingPage();
+ await dashboard.clickNewDashboard();
+ await timePicker.setDefaultAbsoluteRange();
+ await ebtUIHelper.setOptIn(true);
+ await dashboardAddPanel.addSavedSearch('A Saved Search');
+ await header.waitUntilLoadingHasFinished();
+ await dashboard.waitForRenderComplete();
+ const rows = await dataGrid.getDocTableRows();
+ expect(rows.length).to.be.above(0);
+ await testSubjects.click('dashboardEditorMenuButton');
+
+ const events = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['click'],
+ withTimeoutMs: 500,
+ });
+
+ expect(
+ events.length > 0 &&
+ events.every((event) => !(event.context.discoverProfiles as string[])?.length)
+ ).to.be(true);
+ });
+ });
+
+ describe('events', () => {
+ beforeEach(async () => {
+ await common.navigateToApp('discover');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+ });
+
+ it('should track field usage when a field is added to the table', async () => {
+ await dataViews.switchToAndValidate('my-example-*');
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+ await ebtUIHelper.setOptIn(true);
+ await unifiedFieldList.clickFieldListItemAdd('service.name');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+
+ const [event] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event.properties).to.eql({
+ eventName: 'dataTableSelection',
+ fieldName: 'service.name',
+ });
+
+ await unifiedFieldList.clickFieldListItemAdd('_score');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+
+ const [_, event2] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event2.properties).to.eql({
+ eventName: 'dataTableSelection',
+ });
+ });
+
+ it('should track field usage when a field is removed from the table', async () => {
+ await dataViews.switchToAndValidate('my-example-logs');
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+ await ebtUIHelper.setOptIn(true);
+ await unifiedFieldList.clickFieldListItemRemove('log.level');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+
+ const [event] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event.properties).to.eql({
+ eventName: 'dataTableRemoval',
+ fieldName: 'log.level',
+ });
+ });
+
+ it('should track field usage when a filter is added', async () => {
+ await dataViews.switchToAndValidate('my-example-logs');
+ await discover.waitUntilSearchingHasFinished();
+ await ebtUIHelper.setOptIn(true);
+ await dataGrid.clickCellFilterForButtonExcludingControlColumns(0, 0);
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+
+ const [event] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event.properties).to.eql({
+ eventName: 'filterAddition',
+ fieldName: '@timestamp',
+ filterOperation: '+',
+ });
+
+ await unifiedFieldList.clickFieldListExistsFilter('log.level');
+
+ const [_, event2] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event2.properties).to.eql({
+ eventName: 'filterAddition',
+ fieldName: 'log.level',
+ filterOperation: '_exists_',
+ });
+ });
+
+ it('should track field usage for doc viewer too', async () => {
+ await dataViews.switchToAndValidate('my-example-logs');
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+ await ebtUIHelper.setOptIn(true);
+
+ await dataGrid.clickRowToggle();
+ await discover.isShowingDocViewer();
+
+ // event 1
+ await dataGrid.clickFieldActionInFlyout('service.name', 'toggleColumnButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ // event 2
+ await dataGrid.clickFieldActionInFlyout('log.level', 'toggleColumnButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ // event 3
+ await dataGrid.clickFieldActionInFlyout('log.level', 'addFilterOutValueButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ const [event1, event2, event3] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event1.properties).to.eql({
+ eventName: 'dataTableSelection',
+ fieldName: 'service.name',
+ });
+
+ expect(event2.properties).to.eql({
+ eventName: 'dataTableRemoval',
+ fieldName: 'log.level',
+ });
+
+ expect(event3.properties).to.eql({
+ eventName: 'filterAddition',
+ fieldName: 'log.level',
+ filterOperation: '-',
+ });
+ });
+
+ it('should track field usage on surrounding documents page', async () => {
+ await dataViews.switchToAndValidate('my-example-logs');
+ await discover.waitUntilSearchingHasFinished();
+ await unifiedFieldList.waitUntilSidebarHasLoaded();
+
+ await dataGrid.clickRowToggle({ rowIndex: 1 });
+ await discover.isShowingDocViewer();
+
+ const [, surroundingActionEl] = await dataGrid.getRowActions();
+ await surroundingActionEl.click();
+ await header.waitUntilLoadingHasFinished();
+ await ebtUIHelper.setOptIn(true);
+
+ await dataGrid.clickRowToggle({ rowIndex: 0 });
+ await discover.isShowingDocViewer();
+
+ // event 1
+ await dataGrid.clickFieldActionInFlyout('service.name', 'toggleColumnButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ // event 2
+ await dataGrid.clickFieldActionInFlyout('log.level', 'toggleColumnButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ // event 3
+ await dataGrid.clickFieldActionInFlyout('log.level', 'addFilterOutValueButton');
+ await header.waitUntilLoadingHasFinished();
+ await discover.waitUntilSearchingHasFinished();
+
+ const [event1, event2, event3] = await ebtUIHelper.getEvents(Number.MAX_SAFE_INTEGER, {
+ eventTypes: ['discover_field_usage'],
+ withTimeoutMs: 500,
+ });
+
+ expect(event1.properties).to.eql({
+ eventName: 'dataTableSelection',
+ fieldName: 'service.name',
+ });
+
+ expect(event2.properties).to.eql({
+ eventName: 'dataTableRemoval',
+ fieldName: 'log.level',
+ });
+
+ expect(event3.properties).to.eql({
+ eventName: 'filterAddition',
+ fieldName: 'log.level',
+ filterOperation: '-',
+ });
+
+ expect(event3.context.discoverProfiles).to.eql([
+ 'example-root-profile',
+ 'example-data-source-profile',
+ ]);
+ });
+ });
+ });
+}
diff --git a/test/functional/apps/discover/context_awareness/index.ts b/test/functional/apps/discover/context_awareness/index.ts
index 655f4460883d1..f937f38c741f9 100644
--- a/test/functional/apps/discover/context_awareness/index.ts
+++ b/test/functional/apps/discover/context_awareness/index.ts
@@ -38,6 +38,7 @@ export default function ({ getService, getPageObjects, loadTestFile }: FtrProvid
loadTestFile(require.resolve('./_root_profile'));
loadTestFile(require.resolve('./_data_source_profile'));
+ loadTestFile(require.resolve('./_telemetry'));
loadTestFile(require.resolve('./extensions/_get_row_indicator_provider'));
loadTestFile(require.resolve('./extensions/_get_row_additional_leading_controls'));
loadTestFile(require.resolve('./extensions/_get_doc_viewer'));
From f207c2c176ec6d96768f4fefec546596cce57463 Mon Sep 17 00:00:00 2001
From: Kurt
Date: Mon, 30 Sep 2024 12:34:04 -0400
Subject: [PATCH 022/107] ESLint Rule to discourage hashes being created with
unsafe algorithms (#190973)
Closes https://github.com/elastic/kibana/issues/185601
## Summary
Using non-compliant algorithms with Node Crypto's `createHash` function
will cause failures when running Kibana in FIPS mode.
We want to discourage usage of such algorithms.
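For illustration only, a minimal sketch of what the new rule flags versus allows
(variable names are hypothetical; the allowed algorithms and the opt-out comment
mirror the changes below):
```ts
import { createHash } from 'crypto';

// Flagged by @kbn/eslint/no_unsafe_hash: md5 is not FIPS-compliant.
const unsafeDigest = createHash('md5').update('payload').digest('hex');

// Allowed: sha256 is on the rule's allowed list (sha256, sha3-256, sha512).
const safeDigest = createHash('sha256').update('payload').digest('hex');

// Existing call sites that intentionally keep a weak hash opt out explicitly:
const legacyEtag = createHash('sha1').update('payload').digest('hex'); // eslint-disable-line @kbn/eslint/no_unsafe_hash

console.log(unsafeDigest, safeDigest, legacyEtag);
```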
---------
Co-authored-by: Sid
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine
---
.../src/bundle_routes/utils.ts | 2 +-
.../src/bootstrap/bootstrap_renderer.ts | 2 +-
.../src/get_migration_hash.ts | 2 +-
packages/kbn-es/src/install/install_source.ts | 4 +-
packages/kbn-eslint-config/.eslintrc.js | 1 +
packages/kbn-eslint-plugin-eslint/index.js | 1 +
.../rules/no_unsafe_hash.js | 166 ++++++++++++++++++
.../rules/no_unsafe_hash.test.js | 142 +++++++++++++++
.../report_failures_to_file.ts | 2 +-
.../kbn-optimizer/src/common/dll_manifest.ts | 2 +-
.../server/rest_api_routes/internal/fields.ts | 2 +-
.../server/routes/fullstory.ts | 2 +-
.../common/plugins/cases/server/routes.ts | 2 +-
13 files changed, 320 insertions(+), 10 deletions(-)
create mode 100644 packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.js
create mode 100644 packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.test.js
diff --git a/packages/core/apps/core-apps-server-internal/src/bundle_routes/utils.ts b/packages/core/apps/core-apps-server-internal/src/bundle_routes/utils.ts
index 05a31f85a51cc..ee115cda6e5b8 100644
--- a/packages/core/apps/core-apps-server-internal/src/bundle_routes/utils.ts
+++ b/packages/core/apps/core-apps-server-internal/src/bundle_routes/utils.ts
@@ -13,7 +13,7 @@ import * as Rx from 'rxjs';
import { map, takeUntil } from 'rxjs';
export const generateFileHash = (fd: number): Promise<string> => {
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
const read = createReadStream(null as any, {
fd,
start: 0,
diff --git a/packages/core/rendering/core-rendering-server-internal/src/bootstrap/bootstrap_renderer.ts b/packages/core/rendering/core-rendering-server-internal/src/bootstrap/bootstrap_renderer.ts
index 757862d1d3c6c..8aa0d2a6c0387 100644
--- a/packages/core/rendering/core-rendering-server-internal/src/bootstrap/bootstrap_renderer.ts
+++ b/packages/core/rendering/core-rendering-server-internal/src/bootstrap/bootstrap_renderer.ts
@@ -114,7 +114,7 @@ export const bootstrapRendererFactory: BootstrapRendererFactory = ({
publicPathMap,
});
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
hash.update(body);
const etag = hash.digest('hex');
diff --git a/packages/core/test-helpers/core-test-helpers-so-type-serializer/src/get_migration_hash.ts b/packages/core/test-helpers/core-test-helpers-so-type-serializer/src/get_migration_hash.ts
index 461188703b3aa..c65f6330e176b 100644
--- a/packages/core/test-helpers/core-test-helpers-so-type-serializer/src/get_migration_hash.ts
+++ b/packages/core/test-helpers/core-test-helpers-so-type-serializer/src/get_migration_hash.ts
@@ -16,7 +16,7 @@ type SavedObjectTypeMigrationHash = string;
export const getMigrationHash = (soType: SavedObjectsType): SavedObjectTypeMigrationHash => {
const migInfo = extractMigrationInfo(soType);
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
const hashParts = [
migInfo.name,
diff --git a/packages/kbn-es/src/install/install_source.ts b/packages/kbn-es/src/install/install_source.ts
index 7dfbe8d7bd5b3..244b349002829 100644
--- a/packages/kbn-es/src/install/install_source.ts
+++ b/packages/kbn-es/src/install/install_source.ts
@@ -84,7 +84,7 @@ async function sourceInfo(cwd: string, license: string, log: ToolingLog = defaul
log.info('on %s at %s', chalk.bold(branch), chalk.bold(sha));
log.info('%s locally modified file(s)', chalk.bold(status.modified.length));
- const etag = crypto.createHash('md5').update(branch);
+ const etag = crypto.createHash('md5').update(branch); // eslint-disable-line @kbn/eslint/no_unsafe_hash
etag.update(sha);
// for changed files, use last modified times in hash calculation
@@ -92,7 +92,7 @@ async function sourceInfo(cwd: string, license: string, log: ToolingLog = defaul
etag.update(fs.statSync(path.join(cwd, file.path)).mtime.toString());
});
- const cwdHash = crypto.createHash('md5').update(cwd).digest('hex').substr(0, 8);
+ const cwdHash = crypto.createHash('md5').update(cwd).digest('hex').substr(0, 8); // eslint-disable-line @kbn/eslint/no_unsafe_hash
const basename = `${branch}-${task}-${cwdHash}`;
const filename = `${basename}.${ext}`;
diff --git a/packages/kbn-eslint-config/.eslintrc.js b/packages/kbn-eslint-config/.eslintrc.js
index a68dc6ecd949e..205e5b182e215 100644
--- a/packages/kbn-eslint-config/.eslintrc.js
+++ b/packages/kbn-eslint-config/.eslintrc.js
@@ -314,6 +314,7 @@ module.exports = {
'@kbn/eslint/no_constructor_args_in_property_initializers': 'error',
'@kbn/eslint/no_this_in_property_initializers': 'error',
'@kbn/eslint/no_unsafe_console': 'error',
+ '@kbn/eslint/no_unsafe_hash': 'error',
'@kbn/imports/no_unresolvable_imports': 'error',
'@kbn/imports/uniform_imports': 'error',
'@kbn/imports/no_unused_imports': 'error',
diff --git a/packages/kbn-eslint-plugin-eslint/index.js b/packages/kbn-eslint-plugin-eslint/index.js
index 1b9c04a2b7918..5ff3d70ae8a32 100644
--- a/packages/kbn-eslint-plugin-eslint/index.js
+++ b/packages/kbn-eslint-plugin-eslint/index.js
@@ -19,5 +19,6 @@ module.exports = {
no_constructor_args_in_property_initializers: require('./rules/no_constructor_args_in_property_initializers'),
no_this_in_property_initializers: require('./rules/no_this_in_property_initializers'),
no_unsafe_console: require('./rules/no_unsafe_console'),
+ no_unsafe_hash: require('./rules/no_unsafe_hash'),
},
};
diff --git a/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.js b/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.js
new file mode 100644
index 0000000000000..2088c196ddd60
--- /dev/null
+++ b/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.js
@@ -0,0 +1,166 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+const allowedAlgorithms = ['sha256', 'sha3-256', 'sha512'];
+
+module.exports = {
+ allowedAlgorithms,
+ meta: {
+ type: 'problem',
+ docs: {
+ description: 'Allow usage of createHash only with allowed algorithms.',
+ category: 'FIPS',
+ recommended: false,
+ },
+ messages: {
+ noDisallowedHash:
+ 'Usage of {{functionName}} with "{{algorithm}}" is not allowed. Only the following algorithms are allowed: [{{allowedAlgorithms}}]. If you need to use a different algorithm, please contact the Kibana security team.',
+ },
+ schema: [],
+ },
+ create(context) {
+ let isCreateHashImported = false;
+ let createHashName = 'createHash';
+ let cryptoLocalName = 'crypto';
+ let usedFunctionName = '';
+ const sourceCode = context.getSourceCode();
+
+ const disallowedAlgorithmNodes = new Set();
+
+ function isAllowedAlgorithm(algorithm) {
+ return allowedAlgorithms.includes(algorithm);
+ }
+
+ function isHashOrCreateHash(value) {
+ if (value === 'hash' || value === 'createHash') {
+ usedFunctionName = value;
+ return true;
+ }
+ return false;
+ }
+
+ function getIdentifierValue(node) {
+ const scope = sourceCode.getScope(node);
+ if (!scope) {
+ return;
+ }
+ const variable = scope.variables.find((variable) => variable.name === node.name);
+ if (variable && variable.defs.length > 0) {
+ const def = variable.defs[0];
+ if (
+ def.node.init &&
+ def.node.init.type === 'Literal' &&
+ !isAllowedAlgorithm(def.node.init.value)
+ ) {
+ disallowedAlgorithmNodes.add(node.name);
+ return def.node.init.value;
+ }
+ }
+ }
+
+ return {
+ ImportDeclaration(node) {
+ if (node.source.value === 'crypto' || node.source.value === 'node:crypto') {
+ node.specifiers.forEach((specifier) => {
+ if (
+ specifier.type === 'ImportSpecifier' &&
+ isHashOrCreateHash(specifier.imported.name)
+ ) {
+ isCreateHashImported = true;
+ createHashName = specifier.local.name; // Capture local name (renamed or not)
+ } else if (specifier.type === 'ImportDefaultSpecifier') {
+ cryptoLocalName = specifier.local.name;
+ }
+ });
+ }
+ },
+ VariableDeclarator(node) {
+ if (node.init && node.init.type === 'Literal' && !isAllowedAlgorithm(node.init.value)) {
+ disallowedAlgorithmNodes.add(node.id.name);
+ }
+ },
+ AssignmentExpression(node) {
+ if (
+ node.right.type === 'Literal' &&
+ node.right.value === 'md5' &&
+ node.left.type === 'Identifier'
+ ) {
+ disallowedAlgorithmNodes.add(node.left.name);
+ }
+ },
+ CallExpression(node) {
+ const callee = node.callee;
+
+ if (
+ callee.type === 'MemberExpression' &&
+ callee.object.name === cryptoLocalName &&
+ isHashOrCreateHash(callee.property.name)
+ ) {
+ const arg = node.arguments[0];
+ if (arg) {
+ if (arg.type === 'Literal' && !isAllowedAlgorithm(arg.value)) {
+ context.report({
+ node,
+ messageId: 'noDisallowedHash',
+ data: {
+ algorithm: arg.value,
+ allowedAlgorithms: allowedAlgorithms.join(', '),
+ functionName: usedFunctionName,
+ },
+ });
+ } else if (arg.type === 'Identifier') {
+ const identifierValue = getIdentifierValue(arg);
+ if (disallowedAlgorithmNodes.has(arg.name) && identifierValue) {
+ context.report({
+ node,
+ messageId: 'noDisallowedHash',
+ data: {
+ algorithm: identifierValue,
+ allowedAlgorithms: allowedAlgorithms.join(', '),
+ functionName: usedFunctionName,
+ },
+ });
+ }
+ }
+ }
+ }
+
+ if (isCreateHashImported && callee.name === createHashName) {
+ const arg = node.arguments[0];
+ if (arg) {
+ if (arg.type === 'Literal' && !isAllowedAlgorithm(arg.value)) {
+ context.report({
+ node,
+ messageId: 'noDisallowedHash',
+ data: {
+ algorithm: arg.value,
+ allowedAlgorithms: allowedAlgorithms.join(', '),
+ functionName: usedFunctionName,
+ },
+ });
+ } else if (arg.type === 'Identifier') {
+ const identifierValue = getIdentifierValue(arg);
+ if (disallowedAlgorithmNodes.has(arg.name) && identifierValue) {
+ context.report({
+ node,
+ messageId: 'noDisallowedHash',
+ data: {
+ algorithm: identifierValue,
+ allowedAlgorithms: allowedAlgorithms.join(', '),
+ functionName: usedFunctionName,
+ },
+ });
+ }
+ }
+ }
+ }
+ },
+ };
+ },
+};
diff --git a/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.test.js b/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.test.js
new file mode 100644
index 0000000000000..d384ea40819eb
--- /dev/null
+++ b/packages/kbn-eslint-plugin-eslint/rules/no_unsafe_hash.test.js
@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+const { RuleTester } = require('eslint');
+const { allowedAlgorithms, ...rule } = require('./no_unsafe_hash');
+
+const dedent = require('dedent');
+
+const joinedAllowedAlgorithms = `[${allowedAlgorithms.join(', ')}]`;
+
+const ruleTester = new RuleTester({
+ parser: require.resolve('@typescript-eslint/parser'),
+ parserOptions: {
+ sourceType: 'module',
+ ecmaVersion: 2018,
+ ecmaFeatures: {
+ jsx: true,
+ },
+ },
+});
+
+ruleTester.run('@kbn/eslint/no_unsafe_hash', rule, {
+ valid: [
+ // valid import of crypto and call of createHash
+ {
+ code: dedent`
+ import crypto from 'crypto';
+ crypto.createHash('sha256');
+ `,
+ },
+ // valid import and call of createHash
+ {
+ code: dedent`
+ import { createHash } from 'crypto';
+ createHash('sha256');
+ `,
+ },
+ // valid import and call of createHash with a variable containing a compliant algorithm
+ {
+ code: dedent`
+ import { createHash } from 'crypto';
+ const myHash = 'sha256';
+ createHash(myHash);
+ `,
+ },
+ // valid import and call of hash with a variable containing a compliant algorithm
+ {
+ code: dedent`
+ import { hash } from 'crypto';
+ const myHash = 'sha256';
+ hash(myHash);
+ `,
+ },
+ ],
+
+ invalid: [
+ // invalid call of createHash when calling from crypto
+ {
+ code: dedent`
+ import crypto from 'crypto';
+ crypto.createHash('md5');
+ `,
+ errors: [
+ {
+ line: 2,
+ message: `Usage of createHash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+ // invalid call of createHash when importing directly
+ {
+ code: dedent`
+ import { createHash } from 'crypto';
+ createHash('md5');
+ `,
+ errors: [
+ {
+ line: 2,
+ message: `Usage of createHash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+ // invalid call of createHash when calling with a variable containing md5
+ {
+ code: dedent`
+ import { createHash } from 'crypto';
+ const myHash = 'md5';
+ createHash(myHash);
+ `,
+ errors: [
+ {
+ line: 3,
+ message: `Usage of createHash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+ // invalid import and call of hash when importing directly
+ {
+ code: dedent`
+ import { hash } from 'crypto';
+ hash('md5');
+ `,
+ errors: [
+ {
+ line: 2,
+ message: `Usage of hash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+ {
+ code: dedent`
+ import _crypto from 'crypto';
+ _crypto.hash('md5');
+ `,
+ errors: [
+ {
+ line: 2,
+ message: `Usage of hash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+
+ {
+ code: dedent`
+ import { hash as _hash } from 'crypto';
+ _hash('md5');
+ `,
+ errors: [
+ {
+ line: 2,
+ message: `Usage of hash with "md5" is not allowed. Only the following algorithms are allowed: ${joinedAllowedAlgorithms}. If you need to use a different algorithm, please contact the Kibana security team.`,
+ },
+ ],
+ },
+ ],
+});
diff --git a/packages/kbn-failed-test-reporter-cli/failed_tests_reporter/report_failures_to_file.ts b/packages/kbn-failed-test-reporter-cli/failed_tests_reporter/report_failures_to_file.ts
index 7876efb8502a5..b1e3997ebf030 100644
--- a/packages/kbn-failed-test-reporter-cli/failed_tests_reporter/report_failures_to_file.ts
+++ b/packages/kbn-failed-test-reporter-cli/failed_tests_reporter/report_failures_to_file.ts
@@ -127,7 +127,7 @@ export async function reportFailuresToFile(
// Jest could, in theory, fail 1000s of tests and write 1000s of failures
// So let's just write files for the first 20
for (const failure of failures.slice(0, 20)) {
- const hash = createHash('md5').update(failure.name).digest('hex');
+ const hash = createHash('md5').update(failure.name).digest('hex'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
const filenameBase = `${
process.env.BUILDKITE_JOB_ID ? process.env.BUILDKITE_JOB_ID + '_' : ''
}${hash}`;
diff --git a/packages/kbn-optimizer/src/common/dll_manifest.ts b/packages/kbn-optimizer/src/common/dll_manifest.ts
index 0a5bebefdeca5..fc8c597110156 100644
--- a/packages/kbn-optimizer/src/common/dll_manifest.ts
+++ b/packages/kbn-optimizer/src/common/dll_manifest.ts
@@ -20,7 +20,7 @@ export interface ParsedDllManifest {
}
const hash = (s: string) => {
- return Crypto.createHash('sha1').update(s).digest('base64').replace(/=+$/, '');
+ return Crypto.createHash('sha1').update(s).digest('base64').replace(/=+$/, ''); // eslint-disable-line @kbn/eslint/no_unsafe_hash
};
export function parseDllManifest(manifest: DllManifest): ParsedDllManifest {
diff --git a/src/plugins/data_views/server/rest_api_routes/internal/fields.ts b/src/plugins/data_views/server/rest_api_routes/internal/fields.ts
index 7b13704f3c50a..0d8f8b4dd67b5 100644
--- a/src/plugins/data_views/server/rest_api_routes/internal/fields.ts
+++ b/src/plugins/data_views/server/rest_api_routes/internal/fields.ts
@@ -21,7 +21,7 @@ import { parseFields, IBody, IQuery, querySchema, validate } from './fields_for'
import { DEFAULT_FIELD_CACHE_FRESHNESS } from '../../constants';
export function calculateHash(srcBuffer: Buffer) {
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
hash.update(srcBuffer);
return hash.digest('hex');
}
diff --git a/x-pack/plugins/cloud_integrations/cloud_full_story/server/routes/fullstory.ts b/x-pack/plugins/cloud_integrations/cloud_full_story/server/routes/fullstory.ts
index 03e38baee4e91..d983191c726df 100644
--- a/x-pack/plugins/cloud_integrations/cloud_full_story/server/routes/fullstory.ts
+++ b/x-pack/plugins/cloud_integrations/cloud_full_story/server/routes/fullstory.ts
@@ -26,7 +26,7 @@ export const renderFullStoryLibraryFactory = (dist = true) =>
headers: HttpResponseOptions['headers'];
}> => {
const srcBuffer = await fs.readFile(FULLSTORY_LIBRARY_PATH);
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
hash.update(srcBuffer);
const hashDigest = hash.digest('hex');
diff --git a/x-pack/test/cases_api_integration/common/plugins/cases/server/routes.ts b/x-pack/test/cases_api_integration/common/plugins/cases/server/routes.ts
index 10139f636c809..3269f9f059446 100644
--- a/x-pack/test/cases_api_integration/common/plugins/cases/server/routes.ts
+++ b/x-pack/test/cases_api_integration/common/plugins/cases/server/routes.ts
@@ -19,7 +19,7 @@ import { CASES_TELEMETRY_TASK_NAME } from '@kbn/cases-plugin/common/constants';
import type { FixtureStartDeps } from './plugin';
const hashParts = (parts: string[]): string => {
- const hash = createHash('sha1');
+ const hash = createHash('sha1'); // eslint-disable-line @kbn/eslint/no_unsafe_hash
const hashFeed = parts.join('-');
return hash.update(hashFeed).digest('hex');
};
From fefa59f41206c534297813af2cb6f732c2c59aeb Mon Sep 17 00:00:00 2001
From: Davis Plumlee <56367316+dplumlee@users.noreply.github.com>
Date: Mon, 30 Sep 2024 12:37:29 -0400
Subject: [PATCH 023/107] [Security Solution] Test plan for rule `type` field
diff algorithm (#193372)
## Summary
Related ticket: https://github.com/elastic/kibana/issues/190482
Adds a test plan for the `type` field diff algorithm
implemented here: https://github.com/elastic/kibana/pull/193369
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
---
.../upgrade_review_algorithms.md | 117 ++++++++++++++++--
1 file changed, 109 insertions(+), 8 deletions(-)
diff --git a/x-pack/plugins/security_solution/docs/testing/test_plans/detection_response/prebuilt_rules/upgrade_review_algorithms.md b/x-pack/plugins/security_solution/docs/testing/test_plans/detection_response/prebuilt_rules/upgrade_review_algorithms.md
index e65d366e0f44c..c4a39a994144f 100644
--- a/x-pack/plugins/security_solution/docs/testing/test_plans/detection_response/prebuilt_rules/upgrade_review_algorithms.md
+++ b/x-pack/plugins/security_solution/docs/testing/test_plans/detection_response/prebuilt_rules/upgrade_review_algorithms.md
@@ -17,12 +17,16 @@ Status: `in progress`.
- [Rule field doesn't have an update and has no custom value - `AAA`](#rule-field-doesnt-have-an-update-and-has-no-custom-value---aaa)
- [**Scenario: `AAA` - Rule field is any type**](#scenario-aaa---rule-field-is-any-type)
- [Rule field doesn't have an update but has a custom value - `ABA`](#rule-field-doesnt-have-an-update-but-has-a-custom-value---aba)
- - [**Scenario: `ABA` - Rule field is any type**](#scenario-aba---rule-field-is-any-type)
+ - [**Scenario: `ABA` - Rule field is any type except rule `type`**](#scenario-aba---rule-field-is-any-type-except-rule-type)
+ - [**Scenario: `ABA` - Rule field is rule `type`**](#scenario-aba---rule-field-is-rule-type)
- [Rule field has an update and doesn't have a custom value - `AAB`](#rule-field-has-an-update-and-doesnt-have-a-custom-value---aab)
- - [**Scenario: `AAB` - Rule field is any type**](#scenario-aab---rule-field-is-any-type)
+ - [**Scenario: `AAB` - Rule field is any type except rule `type`**](#scenario-aab---rule-field-is-any-type-except-rule-type)
+ - [**Scenario: `AAB` - Rule field is rule `type`**](#scenario-aab---rule-field-is-rule-type)
- [Rule field has an update and a custom value that are the same - `ABB`](#rule-field-has-an-update-and-a-custom-value-that-are-the-same---abb)
- - [**Scenario: `ABB` - Rule field is any type**](#scenario-abb---rule-field-is-any-type)
+ - [**Scenario: `ABB` - Rule field is any type except rule `type`**](#scenario-abb---rule-field-is-any-type-except-rule-type)
+ - [**Scenario: `ABB` - Rule field is rule `type`**](#scenario-abb---rule-field-is-rule-type)
- [Rule field has an update and a custom value that are NOT the same - `ABC`](#rule-field-has-an-update-and-a-custom-value-that-are-not-the-same---abc)
+ - [**Scenario: `ABC` - Rule field is rule `type`**](#scenario-abc---rule-field-is-rule-type)
- [**Scenario: `ABC` - Rule field is a number or single line string**](#scenario-abc---rule-field-is-a-number-or-single-line-string)
- [**Scenario: `ABC` - Rule field is a mergeable multi line string**](#scenario-abc---rule-field-is-a-mergeable-multi-line-string)
- [**Scenario: `ABC` - Rule field is a non-mergeable multi line string**](#scenario-abc---rule-field-is-a-non-mergeable-multi-line-string)
@@ -37,6 +41,7 @@ Status: `in progress`.
- [**Scenario: `-AB` - Rule field is an array of scalar values**](#scenario--ab---rule-field-is-an-array-of-scalar-values)
- [**Scenario: `-AB` - Rule field is a solvable `data_source` object**](#scenario--ab---rule-field-is-a-solvable-data_source-object)
- [**Scenario: `-AB` - Rule field is a non-solvable `data_source` object**](#scenario--ab---rule-field-is-a-non-solvable-data_source-object)
+ - [**Scenario: `-AB` - Rule field is rule `type`**](#scenario--ab---rule-field-is-rule-type)
## Useful information
@@ -74,7 +79,7 @@ Status: `in progress`.
#### **Scenario: `AAA` - Rule field is any type**
-**Automation**: 10 integration tests with mock rules + a set of unit tests for each algorithm
+**Automation**: 11 integration tests with mock rules + a set of unit tests for each algorithm
```Gherkin
Given field is not customized by the user (current version == base version)
@@ -85,6 +90,7 @@ And field should not be shown in the upgrade preview UI
Examples:
| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | "query" | "query" | "query" | "query" |
| single line string | name | "A" | "A" | "A" | "A" |
| multi line string | description | "My description.\nThis is a second line." | "My description.\nThis is a second line." | "My description.\nThis is a second line." | "My description.\nThis is a second line." |
| number | risk_score | 1 | 1 | 1 | 1 |
@@ -99,7 +105,7 @@ Examples:
### Rule field doesn't have an update but has a custom value - `ABA`
-#### **Scenario: `ABA` - Rule field is any type**
+#### **Scenario: `ABA` - Rule field is any type except rule `type`**
**Automation**: 10 integration tests with mock rules + a set of unit tests for each algorithm
@@ -124,9 +130,27 @@ Examples:
| esql_query | esql_query | {query: "FROM query WHERE true", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} | {query: "FROM query WHERE true", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} |
```
+#### **Scenario: `ABA` - Rule field is rule `type`**
+
+**Automation**: 1 integration test with mock rules + a set of unit tests for each algorithm
+
+```Gherkin
+Given field is customized by the user (current version != base version)
+And field is not updated by Elastic in this upgrade (target version == base version)
+Then for field the diff algorithm should output the target version as the merged one with a non-solvable conflict
+And field should be returned from the `upgrade/_review` API endpoint
+And field should be shown in the upgrade preview UI
+
+Examples:
+| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | "query" | "saved_query" | "query" | "query" |
+```
+
+Notes: `type` field can only be changed between `query` and `saved_query` rule types in the UI and API via normal conventions, but the logic for others is still covered
+
### Rule field has an update and doesn't have a custom value - `AAB`
-#### **Scenario: `AAB` - Rule field is any type**
+#### **Scenario: `AAB` - Rule field is any type except rule `type`**
**Automation**: 10 integration tests with mock rules + a set of unit tests for each algorithm
@@ -151,9 +175,27 @@ Examples:
| esql_query | esql_query | {query: "FROM query WHERE true", language: "esql"} | {query: "FROM query WHERE true", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} |
```
+#### **Scenario: `AAB` - Rule field is rule `type`**
+
+**Automation**: 1 integration test with mock rules + a set of unit tests for each algorithm
+
+```Gherkin
+Given field is not customized by the user (current version == base version)
+And field is updated by Elastic in this upgrade (target version != base version)
+Then for field the diff algorithm should output the target version as the merged one with a non-solvable conflict
+And field should be returned from the `upgrade/_review` API endpoint
+And field should be shown in the upgrade preview UI
+
+Examples:
+| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | "query" | "query" | "saved_query" | "saved_query" |
+```
+
+Notes: `type` field can only be changed between `query` and `saved_query` rule types in the UI and API via normal conventions, but the logic for others is still covered
+
### Rule field has an update and a custom value that are the same - `ABB`
-#### **Scenario: `ABB` - Rule field is any type**
+#### **Scenario: `ABB` - Rule field is any type except rule `type`**
**Automation**: 10 integration tests with mock rules + a set of unit tests for each algorithm
@@ -179,8 +221,46 @@ Examples:
| esql_query | esql_query | {query: "FROM query WHERE true", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} | {query: "FROM query WHERE false", language: "esql"} |
```
+#### **Scenario: `ABB` - Rule field is rule `type`**
+
+**Automation**: 1 integration test with mock rules + a set of unit tests for each algorithm
+
+```Gherkin
+Given field is customized by the user (current version != base version)
+And field is updated by Elastic in this upgrade (target version != base version)
+And customized field is the same as the Elastic update in this upgrade (current version == target version)
+Then for field the diff algorithm should output the target version as the merged one with a non-solvable conflict
+And field should be returned from the `upgrade/_review` API endpoint
+And field should be shown in the upgrade preview UI
+
+Examples:
+| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | "query" | "saved_query" | "saved_query" | "saved_query" |
+```
+
+Notes: `type` field can only be changed between `query` and `saved_query` rule types in the UI and API via normal conventions, but the logic for others is still covered
+
### Rule field has an update and a custom value that are NOT the same - `ABC`
+#### **Scenario: `ABC` - Rule field is rule `type`**
+
+**Automation**: 1 integration test with mock rules + a set of unit tests for the algorithms
+
+```Gherkin
+Given field is customized by the user (current version != base version)
+And field is updated by Elastic in this upgrade (target version != base version)
+And customized field is different than the Elastic update in this upgrade (current version != target version)
+Then for field the diff algorithm should output the target version as the merged one with a non-solvable conflict
+And field should be returned from the `upgrade/_review` API endpoint
+And field should be shown in the upgrade preview UI
+
+Examples:
+| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | "query" | "saved_query" | "threshold" | "threshold" |
+```
+
+Notes: `type` field can only be changed between `query` and `saved_query` rule types in the UI and API via normal conventions, but the logic for others is still covered. This test case scenario cannot currently be reached.
+
#### **Scenario: `ABC` - Rule field is a number or single line string**
**Automation**: 2 integration tests with mock rules + a set of unit tests for the algorithms
@@ -328,7 +408,7 @@ Examples:
#### **Scenario: `-AA` - Rule field is any type**
-**Automation**: 9 integration tests with mock rules + a set of unit tests for each algorithm
+**Automation**: 11 integration tests with mock rules + a set of unit tests for each algorithm
```Gherkin
Given at least 1 installed prebuilt rule has a new version available
@@ -340,6 +420,7 @@ And field should not be shown in the upgrade preview UI
Examples:
| algorithm | field_name | base_version | current_version | target_version | merged_version |
+| rule type | type | N/A | "query" | "query" | "query" |
| single line string | name | N/A | "A" | "A" | "A" |
| multi line string | description | N/A | "My description.\nThis is a second line." | "My description.\nThis is a second line." | "My description.\nThis is a second line." |
| number | risk_score | N/A | 1 | 1 | 1 |
@@ -438,3 +519,23 @@ Examples:
| algorithm | base_version | current_version | target_version | merged_version |
| data_source | N/A | {type: "index_patterns", "index_patterns": ["one", "two", "three"]} | {type: "data_view", "data_view_id": "A"} | {type: "data_view", "data_view_id": "A"} |
```
+
+#### **Scenario: `-AB` - Rule field is rule `type`**
+
+**Automation**: 1 integration test with mock rules + a set of unit tests for the algorithm
+
+```Gherkin
+Given at least 1 installed prebuilt rule has a new version available
+And the base version of the rule cannot be determined
+And customized field is different than the Elastic update in this upgrade (current version != target version)
+Then for field the diff algorithm should output the target version as the merged version with a non-solvable conflict
+And field should be returned from the `upgrade/_review` API endpoint
+And field should be shown in the upgrade preview UI
+
+Examples:
+| algorithm | base_version | current_version | target_version | merged_version |
+| rule type | N/A | "query" | "saved_query" | "saved_query" |
+```
+
+Notes: `type` field can only be changed between `query` and `saved_query` rule types in the UI and API via normal conventions, but the logic for others is still covered
From c1f72d71cade7423ab5ab5d3ed68f4aa086f5faf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cau=C3=AA=20Marcondes?=
<55978943+cauemarcondes@users.noreply.github.com>
Date: Mon, 30 Sep 2024 18:19:19 +0100
Subject: [PATCH 024/107] [Inventory] Typing entities (#194431)
Adds TypeScript typing for the Entity type.
The concrete entity shape is correctly inferred from the `entity.type` field.
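For illustration only, a minimal standalone sketch of how the discriminated union
narrows (the names below are simplified stand-ins for the interfaces added in this
patch, and the keys assume the field constants resolve to the dotted names seen in
the mock data, e.g. `entity.type`, `host.name`, `container.id`):
```ts
// Simplified stand-ins for ServiceEntity, HostEntity and ContainerEntity.
interface ServiceEntityLike {
  'entity.type': 'service';
  'service.name': string;
}
interface HostEntityLike {
  'entity.type': 'host';
  'host.name': string;
}
interface ContainerEntityLike {
  'entity.type': 'container';
  'container.id': string;
}
type EntityLike = ServiceEntityLike | HostEntityLike | ContainerEntityLike;

// Because `entity.type` is a literal discriminant, TypeScript narrows each branch.
function getEntityIdentifier(entity: EntityLike): string {
  switch (entity['entity.type']) {
    case 'service':
      return entity['service.name']; // narrowed to ServiceEntityLike
    case 'host':
      return entity['host.name']; // narrowed to HostEntityLike
    case 'container':
      return entity['container.id']; // narrowed to ContainerEntityLike
  }
}

console.log(getEntityIdentifier({ 'entity.type': 'host', 'host.name': 'my-host' }));
```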
---
.../inventory/common/entities.ts | 42 +++++++++++++++++++
.../entities_grid/entities_grid.stories.tsx | 2 +-
.../entities_grid/mock/entities_mock.ts | 4 +-
.../routes/entities/get_latest_entities.ts | 19 ++-------
.../common/field_names/elasticsearch.ts | 1 +
.../observability_shared/common/index.ts | 1 +
6 files changed, 50 insertions(+), 19 deletions(-)
diff --git a/x-pack/plugins/observability_solution/inventory/common/entities.ts b/x-pack/plugins/observability_solution/inventory/common/entities.ts
index 2135688d75467..5dec3420ee005 100644
--- a/x-pack/plugins/observability_solution/inventory/common/entities.ts
+++ b/x-pack/plugins/observability_solution/inventory/common/entities.ts
@@ -7,6 +7,19 @@
import * as t from 'io-ts';
import { ENTITY_LATEST, entitiesAliasPattern } from '@kbn/entities-schema';
import { isRight } from 'fp-ts/lib/Either';
+import {
+ SERVICE_ENVIRONMENT,
+ SERVICE_NAME,
+ CONTAINER_ID,
+ HOST_NAME,
+} from '@kbn/observability-shared-plugin/common';
+import {
+ ENTITY_DEFINITION_ID,
+ ENTITY_DISPLAY_NAME,
+ ENTITY_ID,
+ ENTITY_LAST_SEEN,
+ ENTITY_TYPE,
+} from './es_fields/entities';
export const entityTypeRt = t.union([
t.literal('service'),
@@ -57,3 +70,32 @@ export const entityTypesRt = new t.Type(
},
(arr) => arr.join()
);
+
+interface BaseEntity {
+ [ENTITY_LAST_SEEN]: string;
+ [ENTITY_ID]: string;
+ [ENTITY_TYPE]: EntityType;
+ [ENTITY_DISPLAY_NAME]: string;
+ [ENTITY_DEFINITION_ID]: string;
+}
+
+/**
+ * These types are based on service, host and container from the built in definition.
+ */
+interface ServiceEntity extends BaseEntity {
+ [ENTITY_TYPE]: 'service';
+ [SERVICE_NAME]: string;
+ [SERVICE_ENVIRONMENT]?: string | null;
+}
+
+interface HostEntity extends BaseEntity {
+ [ENTITY_TYPE]: 'host';
+ [HOST_NAME]: string;
+}
+
+interface ContainerEntity extends BaseEntity {
+ [ENTITY_TYPE]: 'container';
+ [CONTAINER_ID]: string;
+}
+
+export type Entity = ServiceEntity | HostEntity | ContainerEntity;
diff --git a/x-pack/plugins/observability_solution/inventory/public/components/entities_grid/entities_grid.stories.tsx b/x-pack/plugins/observability_solution/inventory/public/components/entities_grid/entities_grid.stories.tsx
index 996f0ec951581..f05bf920a1845 100644
--- a/x-pack/plugins/observability_solution/inventory/public/components/entities_grid/entities_grid.stories.tsx
+++ b/x-pack/plugins/observability_solution/inventory/public/components/entities_grid/entities_grid.stories.tsx
@@ -40,7 +40,7 @@ export const Example: Story<{}> = () => {
);
return (
-
+
{`Entity filter: ${selectedEntityType || 'N/A'}`}
;
-export const entitiesMock: InventoryEntitiesAPIReturnType['entities'] = [
+export const entitiesMock = [
{
'entity.lastSeenTimestamp': '2023-08-20T10:50:06.384Z',
'entity.type': 'host',
@@ -3011,4 +3011,4 @@ export const entitiesMock: InventoryEntitiesAPIReturnType['entities'] = [
'entity.displayName': 'Troy McClure',
'entity.id': '499',
},
-];
+] as unknown as InventoryEntitiesAPIReturnType['entities'];
diff --git a/x-pack/plugins/observability_solution/inventory/server/routes/entities/get_latest_entities.ts b/x-pack/plugins/observability_solution/inventory/server/routes/entities/get_latest_entities.ts
index be909308e49c3..853d52d8401a9 100644
--- a/x-pack/plugins/observability_solution/inventory/server/routes/entities/get_latest_entities.ts
+++ b/x-pack/plugins/observability_solution/inventory/server/routes/entities/get_latest_entities.ts
@@ -12,22 +12,10 @@ import {
ENTITIES_LATEST_ALIAS,
MAX_NUMBER_OF_ENTITIES,
type EntityType,
+ Entity,
} from '../../../common/entities';
-import {
- ENTITY_DISPLAY_NAME,
- ENTITY_ID,
- ENTITY_LAST_SEEN,
- ENTITY_TYPE,
-} from '../../../common/es_fields/entities';
import { getEntityDefinitionIdWhereClause, getEntityTypesWhereClause } from './query_helper';
-export interface LatestEntity {
- [ENTITY_LAST_SEEN]: string;
- [ENTITY_TYPE]: string;
- [ENTITY_DISPLAY_NAME]: string;
- [ENTITY_ID]: string;
-}
-
export async function getLatestEntities({
inventoryEsClient,
sortDirection,
@@ -47,8 +35,7 @@ export async function getLatestEntities({
| ${getEntityDefinitionIdWhereClause()}
| SORT ${sortField} ${sortDirection}
| LIMIT ${MAX_NUMBER_OF_ENTITIES}
- | KEEP ${ENTITY_LAST_SEEN}, ${ENTITY_TYPE}, ${ENTITY_DISPLAY_NAME}, ${ENTITY_ID}
- `,
+ `,
filter: {
bool: {
filter: [...kqlQuery(kuery)],
@@ -56,5 +43,5 @@ export async function getLatestEntities({
},
});
- return esqlResultToPlainObjects(latestEntitiesEsqlResponse);
+ return esqlResultToPlainObjects<Entity>(latestEntitiesEsqlResponse);
}
diff --git a/x-pack/plugins/observability_solution/observability_shared/common/field_names/elasticsearch.ts b/x-pack/plugins/observability_solution/observability_shared/common/field_names/elasticsearch.ts
index 35873a31150ac..afaf78ef1aa9b 100644
--- a/x-pack/plugins/observability_solution/observability_shared/common/field_names/elasticsearch.ts
+++ b/x-pack/plugins/observability_solution/observability_shared/common/field_names/elasticsearch.ts
@@ -107,6 +107,7 @@ export const LABEL_NAME = 'labels.name';
export const HOST = 'host';
export const HOST_HOSTNAME = 'host.hostname';
+export const HOST_NAME = 'host.name';
export const HOST_OS_PLATFORM = 'host.os.platform';
export const CONTAINER_ID = 'container.id';
export const KUBERNETES = 'kubernetes';
diff --git a/x-pack/plugins/observability_solution/observability_shared/common/index.ts b/x-pack/plugins/observability_solution/observability_shared/common/index.ts
index d13e2b32839d6..e14bbb4139176 100644
--- a/x-pack/plugins/observability_solution/observability_shared/common/index.ts
+++ b/x-pack/plugins/observability_solution/observability_shared/common/index.ts
@@ -98,6 +98,7 @@ export {
LABEL_NAME,
HOST,
HOST_HOSTNAME,
+ HOST_NAME,
HOST_OS_PLATFORM,
CONTAINER_ID,
KUBERNETES,
From 18465e7f7e5d9912e61da68873045f0db984fa2b Mon Sep 17 00:00:00 2001
From: Davis Plumlee <56367316+dplumlee@users.noreply.github.com>
Date: Mon, 30 Sep 2024 13:27:29 -0400
Subject: [PATCH 025/107] [Security Solution] Rule `type` field diff algorithm
(#193369)
## Summary
Addresses https://github.com/elastic/kibana/issues/190482
Adds the diff algorithm implementation for the prebuilt rule `type`
field. Returns `target_version` and a `NON_SOLVABLE` conflict for every
outcome that changes the field.
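For illustration only, a minimal standalone sketch of that decision rule (the
function and constant names are hypothetical simplifications of the `ThreeWayDiff`
enums used in the tests, not the actual implementation added below):
```ts
type Conflict = 'NONE' | 'NON_SOLVABLE';

interface SketchResult<T> {
  mergedVersion: T;
  conflict: Conflict;
}

// Always take the target version; flag a NON_SOLVABLE conflict whenever the
// current or target version has moved away from the base version (or, when the
// base version is missing, whenever current and target differ).
function sketchRuleTypeDiff<T>(
  baseVersion: T | undefined,
  currentVersion: T,
  targetVersion: T
): SketchResult<T> {
  const changed =
    baseVersion === undefined
      ? currentVersion !== targetVersion
      : currentVersion !== baseVersion || targetVersion !== baseVersion;

  return {
    mergedVersion: targetVersion,
    conflict: changed ? 'NON_SOLVABLE' : 'NONE',
  };
}

// e.g. scenario ABA from the tests below:
// sketchRuleTypeDiff('query', 'saved_query', 'query')
//   -> { mergedVersion: 'query', conflict: 'NON_SOLVABLE' }
```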
### Checklist
Delete any items that are not applicable to this PR.
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
---
.../diff/calculation/algorithms/index.ts | 1 +
.../rule_type_diff_algorithm.test.ts | 165 ++++++++++++++++++
.../algorithms/rule_type_diff_algorithm.ts | 98 +++++++++++
3 files changed, 264 insertions(+)
create mode 100644 x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.test.ts
create mode 100644 x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.ts
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/index.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/index.ts
index 629f329c72b9b..c8b55a49edc00 100644
--- a/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/index.ts
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/index.ts
@@ -14,3 +14,4 @@ export { dataSourceDiffAlgorithm } from './data_source_diff_algorithm';
export { kqlQueryDiffAlgorithm } from './kql_query_diff_algorithm';
export { eqlQueryDiffAlgorithm } from './eql_query_diff_algorithm';
export { esqlQueryDiffAlgorithm } from './esql_query_diff_algorithm';
+export { ruleTypeDiffAlgorithm } from './rule_type_diff_algorithm';
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.test.ts
new file mode 100644
index 0000000000000..accf133ac71b3
--- /dev/null
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.test.ts
@@ -0,0 +1,165 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import type {
+ DiffableRuleTypes,
+ ThreeVersionsOf,
+} from '../../../../../../../../common/api/detection_engine';
+import {
+ ThreeWayDiffOutcome,
+ ThreeWayMergeOutcome,
+ MissingVersion,
+ ThreeWayDiffConflict,
+} from '../../../../../../../../common/api/detection_engine';
+import { ruleTypeDiffAlgorithm } from './rule_type_diff_algorithm';
+
+describe('ruleTypeDiffAlgorithm', () => {
+ it('returns current_version as merged output if there is no update - scenario AAA', () => {
+ const mockVersions: ThreeVersionsOf<DiffableRuleTypes> = {
+ base_version: 'query',
+ current_version: 'query',
+ target_version: 'query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.StockValueNoUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NONE,
+ })
+ );
+ });
+
+ it('returns current_version as merged output if current_version is different and there is no update - scenario ABA', () => {
+ // User can change rule type field between `query` and `saved_query` in the UI, no other rule types
+ const mockVersions: ThreeVersionsOf<DiffableRuleTypes> = {
+ base_version: 'query',
+ current_version: 'saved_query',
+ target_version: 'query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.CustomizedValueNoUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ })
+ );
+ });
+
+ it('returns target_version as merged output if current_version is the same and there is an update - scenario AAB', () => {
+ // User can change rule type field between `query` and `saved_query` in the UI, no other rule types
+ const mockVersions: ThreeVersionsOf<DiffableRuleTypes> = {
+ base_version: 'query',
+ current_version: 'query',
+ target_version: 'saved_query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.StockValueCanUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ })
+ );
+ });
+
+ it('returns current_version as merged output if current version is different but it matches the update - scenario ABB', () => {
+ // User can change rule type field between `query` and `saved_query` in the UI, no other rule types
+ const mockVersions: ThreeVersionsOf = {
+ base_version: 'query',
+ current_version: 'saved_query',
+ target_version: 'saved_query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.CustomizedValueSameUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ })
+ );
+ });
+
+ it('returns current_version as merged output if all three versions are different - scenario ABC', () => {
+ // User can change rule type field between `query` and `saved_query` in the UI, no other rule types
+ // NOTE: This test case scenario is currently inaccessible via normal UI or API workflows, but the logic is covered just in case
+ const mockVersions: ThreeVersionsOf = {
+ base_version: 'query',
+ current_version: 'eql',
+ target_version: 'saved_query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.CustomizedValueCanUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ })
+ );
+ });
+
+ describe('if base_version is missing', () => {
+ it('returns current_version as merged output if current_version and target_version are the same - scenario -AA', () => {
+ const mockVersions: ThreeVersionsOf = {
+ base_version: MissingVersion,
+ current_version: 'query',
+ target_version: 'query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ has_base_version: false,
+ base_version: undefined,
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.MissingBaseNoUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NONE,
+ })
+ );
+ });
+
+ it('returns target_version as merged output if current_version and target_version are different - scenario -AB', () => {
+ // User can change rule type field between `query` and `saved_query` in the UI, no other rule types
+ const mockVersions: ThreeVersionsOf = {
+ base_version: MissingVersion,
+ current_version: 'query',
+ target_version: 'saved_query',
+ };
+
+ const result = ruleTypeDiffAlgorithm(mockVersions);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ has_base_version: false,
+ base_version: undefined,
+ merged_version: mockVersions.target_version,
+ diff_outcome: ThreeWayDiffOutcome.MissingBaseCanUpdate,
+ merge_outcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ })
+ );
+ });
+ });
+});
diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.ts
new file mode 100644
index 0000000000000..0701d1e46d251
--- /dev/null
+++ b/x-pack/plugins/security_solution/server/lib/detection_engine/prebuilt_rules/logic/diff/calculation/algorithms/rule_type_diff_algorithm.ts
@@ -0,0 +1,98 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { assertUnreachable } from '../../../../../../../../common/utility_types';
+import type {
+ DiffableRuleTypes,
+ ThreeVersionsOf,
+ ThreeWayDiff,
+} from '../../../../../../../../common/api/detection_engine/prebuilt_rules';
+import {
+ determineDiffOutcome,
+ determineIfValueCanUpdate,
+ MissingVersion,
+ ThreeWayDiffConflict,
+ ThreeWayDiffOutcome,
+ ThreeWayMergeOutcome,
+} from '../../../../../../../../common/api/detection_engine/prebuilt_rules';
+
+export const ruleTypeDiffAlgorithm = (
+ versions: ThreeVersionsOf
+): ThreeWayDiff => {
+ const {
+ base_version: baseVersion,
+ current_version: currentVersion,
+ target_version: targetVersion,
+ } = versions;
+
+ const diffOutcome = determineDiffOutcome(baseVersion, currentVersion, targetVersion);
+ const valueCanUpdate = determineIfValueCanUpdate(diffOutcome);
+
+ const hasBaseVersion = baseVersion !== MissingVersion;
+
+ const { mergeOutcome, conflict, mergedVersion } = mergeVersions({
+ targetVersion,
+ diffOutcome,
+ });
+
+ return {
+ has_base_version: hasBaseVersion,
+ base_version: hasBaseVersion ? baseVersion : undefined,
+ current_version: currentVersion,
+ target_version: targetVersion,
+ merged_version: mergedVersion,
+ merge_outcome: mergeOutcome,
+
+ diff_outcome: diffOutcome,
+ has_update: valueCanUpdate,
+ conflict,
+ };
+};
+
+interface MergeResult {
+ mergeOutcome: ThreeWayMergeOutcome;
+ mergedVersion: TValue;
+ conflict: ThreeWayDiffConflict;
+}
+
+interface MergeArgs {
+ targetVersion: TValue;
+ diffOutcome: ThreeWayDiffOutcome;
+}
+
+const mergeVersions = ({
+ targetVersion,
+ diffOutcome,
+}: MergeArgs): MergeResult => {
+ switch (diffOutcome) {
+ // Scenario -AA is treated as scenario AAA:
+ // https://github.com/elastic/kibana/pull/184889#discussion_r1636421293
+ case ThreeWayDiffOutcome.MissingBaseNoUpdate:
+ case ThreeWayDiffOutcome.StockValueNoUpdate:
+ return {
+ conflict: ThreeWayDiffConflict.NONE,
+ mergedVersion: targetVersion,
+ mergeOutcome: ThreeWayMergeOutcome.Target,
+ };
+ case ThreeWayDiffOutcome.CustomizedValueNoUpdate:
+ case ThreeWayDiffOutcome.CustomizedValueSameUpdate:
+ case ThreeWayDiffOutcome.StockValueCanUpdate:
+ // NOTE: This scenario is currently inaccessible via normal UI or API workflows, but the logic is covered just in case
+ case ThreeWayDiffOutcome.CustomizedValueCanUpdate:
+ // Scenario -AB is treated as scenario ABC:
+ // https://github.com/elastic/kibana/pull/184889#discussion_r1636421293
+ case ThreeWayDiffOutcome.MissingBaseCanUpdate: {
+ return {
+ mergedVersion: targetVersion,
+ mergeOutcome: ThreeWayMergeOutcome.Target,
+ conflict: ThreeWayDiffConflict.NON_SOLVABLE,
+ };
+ }
+ default:
+ return assertUnreachable(diffOutcome);
+ }
+};
From 7730eaba8d631c56df92e206aafd82084e4351cc Mon Sep 17 00:00:00 2001
From: Alexi Doak <109488926+doakalexi@users.noreply.github.com>
Date: Mon, 30 Sep 2024 10:43:02 -0700
Subject: [PATCH 026/107] [ResponseOps][Connectors] add the "service message"
to the message generated for errors (#194213)
Resolves https://github.com/elastic/kibana/issues/187288
## Summary
When a connector fails, we log the error message. In this PR I updated
the log message to include the error message followed by the
`serviceMessage`, if it is populated. This change provides more detailed
information in the log messages when a connector fails.
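In short, the failure log now appends the connector's `serviceMessage`
(when populated) to the existing error message. A minimal sketch of the
pattern, using a simplified result shape for illustration (the actual
change is in `task_runner_factory.ts` below):

```typescript
// Simplified sketch of the new failure-log construction.
interface ExecutorResultLike {
  message?: string;
  serviceMessage?: string;
}

function buildFailureLog(actionId: string, result: ExecutorResultLike): string {
  let message = result.message ?? 'unknown error';
  if (result.serviceMessage) {
    // Append the more detailed service message when the connector provides one.
    message = `${message}: ${result.serviceMessage}`;
  }
  return `Action '${actionId}' failed: ${message}`;
}

// buildFailureLog('2', { message: 'Error message', serviceMessage: 'Service message' })
// => "Action '2' failed: Error message: Service message"
```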
### Checklist
- [ ] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
### To verify
- Create a connector and update its executor code to make it fail (a
sketch of a failing executor result is shown below)
- Create a rule that uses your connector
- Verify that the log message includes more details about the error,
instead of something like the following example for email connectors:
`Action '[email connector name]' failed: error sending email`
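For the first step, a hypothetical failing executor is enough; the
result shape below mirrors the mock used in the tests in this PR
(the connector type and messages are illustrative only):

```typescript
// Hypothetical executor for a test connector type that always fails.
// Returning status 'error' with a serviceMessage exercises the new log format.
async function executor(execOptions: { actionId: string }) {
  return {
    actionId: execOptions.actionId,
    status: 'error' as const,
    message: 'error sending email',
    serviceMessage: 'SMTP server rejected the connection',
    retry: false,
  };
}
```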
---
.../server/lib/task_runner_factory.test.ts | 48 +++++++++++++++++++
.../actions/server/lib/task_runner_factory.ts | 8 +++-
2 files changed, 55 insertions(+), 1 deletion(-)
diff --git a/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts b/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
index 3dd86bdcf148d..6c4cdd31ccf6c 100644
--- a/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
+++ b/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
@@ -890,6 +890,54 @@ describe('Task Runner Factory', () => {
expect(getErrorSource(err)).toBe(TaskErrorSource.FRAMEWORK);
});
+ test(`will throw an error and log the error message with the serviceMessage`, async () => {
+ const taskRunner = taskRunnerFactory.create({
+ taskInstance: {
+ ...mockedTaskInstance,
+ attempts: 0,
+ },
+ });
+
+ mockedEncryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValueOnce({
+ id: '3',
+ type: 'action_task_params',
+ attributes: {
+ actionId: '2',
+ params: { baz: true },
+ executionId: '123abc',
+ apiKey: Buffer.from('123:abc').toString('base64'),
+ },
+ references: [
+ {
+ id: '2',
+ name: 'actionRef',
+ type: 'action',
+ },
+ ],
+ });
+ mockedActionExecutor.execute.mockResolvedValueOnce({
+ status: 'error',
+ actionId: '2',
+ message: 'Error message',
+ serviceMessage: 'Service message',
+ data: { foo: true },
+ retry: false,
+ errorSource: TaskErrorSource.FRAMEWORK,
+ });
+
+ let err;
+ try {
+ await taskRunner.run();
+ } catch (e) {
+ err = e;
+ }
+
+ expect(err).toBeDefined();
+ expect(taskRunnerFactoryInitializerParams.logger.error as jest.Mock).toHaveBeenCalledWith(
+ `Action '2' failed: Error message: Service message`
+ );
+ });
+
test(`fallbacks to FRAMEWORK error if ActionExecutor does not return any type of source'`, async () => {
const taskRunner = taskRunnerFactory.create({
taskInstance: {
diff --git a/x-pack/plugins/actions/server/lib/task_runner_factory.ts b/x-pack/plugins/actions/server/lib/task_runner_factory.ts
index 2c28c61cad3de..d6b418c481ea5 100644
--- a/x-pack/plugins/actions/server/lib/task_runner_factory.ts
+++ b/x-pack/plugins/actions/server/lib/task_runner_factory.ts
@@ -150,7 +150,13 @@ export class TaskRunnerFactory {
inMemoryMetrics.increment(IN_MEMORY_METRICS.ACTION_EXECUTIONS);
if (executorResult.status === 'error') {
inMemoryMetrics.increment(IN_MEMORY_METRICS.ACTION_FAILURES);
- logger.error(`Action '${actionId}' failed: ${executorResult.message}`);
+
+ let message = executorResult.message;
+ if (executorResult.serviceMessage) {
+ message = `${message}: ${executorResult.serviceMessage}`;
+ }
+ logger.error(`Action '${actionId}' failed: ${message}`);
+
// Task manager error handler only kicks in when an error thrown (at this time)
// So what we have to do is throw when the return status is `error`.
throw throwRetryableError(
From f7d1dd4bf35648b7a3db8fc7d16666f93949b43c Mon Sep 17 00:00:00 2001
From: Rodney Norris
Date: Mon, 30 Sep 2024 12:49:59 -0500
Subject: [PATCH 027/107] [Search][Onboarding] Start Page File Upload & O11y
links (#194231)
## Summary
- Clean-up for start page
- enabled submitting create index form w/ enter in index name input
- extracted start page example to hook to make it easier to update later
- Moved start page language up so changes are saved when switching
between UI & Code views
- Added File Upload link to the ML file uploader
- Added callouts for O11y
### Checklist
- [x] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [x] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
---------
Co-authored-by: Michael DeFazio
---
.../search_indices/common/doc_links.ts | 2 +
.../public/analytics/constants.ts | 1 +
.../public/code_examples/create_index.ts | 2 +
.../public/components/start/create_index.tsx | 197 ++++++++-----
.../components/start/create_index_code.tsx | 32 +--
.../components/start/elasticsearch_start.tsx | 260 ++++++++++++------
.../start/hooks/use_coding_examples.tsx | 15 +
.../public/components/start/types.ts | 3 +
.../plugins/search_indices/public/plugin.ts | 3 +
x-pack/plugins/search_indices/public/types.ts | 1 +
.../svl_search_elasticsearch_start_page.ts | 23 ++
.../search/config.feature_flags.ts | 9 +-
.../functional/test_suites/search/config.ts | 6 +-
.../test_suites/search/elasticsearch_start.ts | 12 +
14 files changed, 389 insertions(+), 177 deletions(-)
create mode 100644 x-pack/plugins/search_indices/public/components/start/hooks/use_coding_examples.tsx
diff --git a/x-pack/plugins/search_indices/common/doc_links.ts b/x-pack/plugins/search_indices/common/doc_links.ts
index 8cceb45041ab9..d7e7119dd7004 100644
--- a/x-pack/plugins/search_indices/common/doc_links.ts
+++ b/x-pack/plugins/search_indices/common/doc_links.ts
@@ -10,12 +10,14 @@ import { DocLinks } from '@kbn/doc-links';
class SearchIndicesDocLinks {
public apiReference: string = '';
public setupSemanticSearch: string = '';
+ public analyzeLogs: string = '';
constructor() {}
setDocLinks(newDocLinks: DocLinks) {
this.apiReference = newDocLinks.apiReference;
this.setupSemanticSearch = newDocLinks.enterpriseSearch.semanticSearch;
+ this.analyzeLogs = newDocLinks.serverlessSearch.integrations;
}
}
export const docLinks = new SearchIndicesDocLinks();
diff --git a/x-pack/plugins/search_indices/public/analytics/constants.ts b/x-pack/plugins/search_indices/public/analytics/constants.ts
index 563e5b62382c0..2a8c6d0d0ea0d 100644
--- a/x-pack/plugins/search_indices/public/analytics/constants.ts
+++ b/x-pack/plugins/search_indices/public/analytics/constants.ts
@@ -13,6 +13,7 @@ export enum AnalyticsEvents {
startCreateIndexLanguageSelect = 'start_code_lang_select',
startCreateIndexCodeCopyInstall = 'start_code_copy_install',
startCreateIndexCodeCopy = 'start_code_copy',
+ startFileUploadClick = 'start_file_upload',
indexDetailsInstallCodeCopy = 'index_details_code_copy_install',
indexDetailsAddMappingsCodeCopy = 'index_details_add_mappings_code_copy',
indexDetailsIngestDocumentsCodeCopy = 'index_details_ingest_documents_code_copy',
diff --git a/x-pack/plugins/search_indices/public/code_examples/create_index.ts b/x-pack/plugins/search_indices/public/code_examples/create_index.ts
index 627329b37d0be..01d969df3d70d 100644
--- a/x-pack/plugins/search_indices/public/code_examples/create_index.ts
+++ b/x-pack/plugins/search_indices/public/code_examples/create_index.ts
@@ -13,6 +13,7 @@ import { PythonServerlessCreateIndexExamples } from './python';
import { ConsoleCreateIndexExamples } from './sense';
export const DefaultServerlessCodeExamples: CreateIndexCodeExamples = {
+ exampleType: 'search',
sense: ConsoleCreateIndexExamples.default,
curl: CurlCreateIndexExamples.default,
python: PythonServerlessCreateIndexExamples.default,
@@ -20,6 +21,7 @@ export const DefaultServerlessCodeExamples: CreateIndexCodeExamples = {
};
export const DenseVectorSeverlessCodeExamples: CreateIndexCodeExamples = {
+ exampleType: 'vector',
sense: ConsoleCreateIndexExamples.dense_vector,
curl: CurlCreateIndexExamples.dense_vector,
python: PythonServerlessCreateIndexExamples.dense_vector,
diff --git a/x-pack/plugins/search_indices/public/components/start/create_index.tsx b/x-pack/plugins/search_indices/public/components/start/create_index.tsx
index ae191481e5da4..bd80922d79689 100644
--- a/x-pack/plugins/search_indices/public/components/start/create_index.tsx
+++ b/x-pack/plugins/search_indices/public/components/start/create_index.tsx
@@ -13,12 +13,16 @@ import {
EuiFlexItem,
EuiForm,
EuiFormRow,
+ EuiHorizontalRule,
EuiIcon,
+ EuiLink,
+ EuiPanel,
EuiSpacer,
EuiText,
EuiToolTip,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
import type { UserStartPrivilegesResponse } from '../../../common';
import { AnalyticsEvents } from '../../analytics/constants';
@@ -28,6 +32,7 @@ import { isValidIndexName } from '../../utils/indices';
import { useCreateIndex } from './hooks/use_create_index';
import { CreateIndexFormState } from './types';
+import { useKibana } from '../../hooks/use_kibana';
const CREATE_INDEX_CONTENT = i18n.translate(
'xpack.searchIndices.startPage.createIndex.action.text',
@@ -47,6 +52,7 @@ export const CreateIndexForm = ({
formState,
setFormState,
}: CreateIndexFormProps) => {
+ const { application } = useKibana().services;
const [indexNameHasError, setIndexNameHasError] = useState(false);
const usageTracker = useUsageTracker();
const { createIndex, isLoading } = useCreateIndex();
@@ -65,93 +71,136 @@ export const CreateIndexForm = ({
setIndexNameHasError(invalidIndexName);
}
};
+ const onFileUpload = useCallback(() => {
+ usageTracker.click(AnalyticsEvents.startFileUploadClick);
+ application.navigateToApp('ml', { path: 'filedatavisualizer' });
+ }, [usageTracker, application]);
return (
-
-
-
+
+
-
-
-
-
- {userPrivileges?.privileges?.canCreateIndex === false ? (
-
- {i18n.translate('xpack.searchIndices.startPage.createIndex.permissionTooltip', {
- defaultMessage: 'You do not have permission to create an index.',
- })}
-
- {i18n.translate('xpack.searchIndices.startPage.pageDescription', {
- defaultMessage: 'Vectorize, search, and visualize your data',
- })}
-
-
-
-
-
+
+
+
+
+
+ {i18n.translate('xpack.searchIndices.startPage.pageDescription', {
+ defaultMessage: 'Vectorize, search, and visualize your data',
+ })}
+
+
+
-
-
-
-
-
-
- {i18n.translate('xpack.searchIndices.startPage.createIndex.title', {
- defaultMessage: 'Create your first index',
- })}
-
-
-
-
-
-
-
-
-
- {i18n.translate('xpack.searchIndices.startPage.createIndex.description', {
- defaultMessage:
- 'An index stores your data and defines the schema, or field mappings, for your searches',
- })}
-
+ {i18n.translate('xpack.searchIndices.startPage.createIndex.description', {
+ defaultMessage:
+ 'An index stores your data and defines the schema, or field mappings, for your searches',
+ })}
+
+ {i18n.translate('xpack.searchIndices.startPage.observabilityCallout.title', {
+ defaultMessage: 'Looking to store your logs or metrics data?',
+ })}
+
+
+
+
+
+
+
+ {i18n.translate('xpack.searchIndices.startPage.observabilityCallout.logs.button', {
+ defaultMessage: 'Collect and analyze logs',
+ })}
+
+
+
+ {i18n.translate(
+ 'xpack.searchIndices.startPage.observabilityCallout.logs.subTitle',
+ {
+ defaultMessage: 'Explore Logstash and Beats',
+ }
+ )}
+
+
+
+
+ or
+
+
+
+ {i18n.translate(
+ 'xpack.searchIndices.startPage.observabilityCallout.o11yTrial.button',
+ {
+ defaultMessage: 'Start an Observability trial',
+ }
+ )}
+
+
+
+ {i18n.translate(
+ 'xpack.searchIndices.startPage.observabilityCallout.o11yTrial.subTitle',
+ {
+ defaultMessage: 'Powerful performance monitoring',
+ }
+ )}
+
+
+
+
);
diff --git a/x-pack/plugins/search_indices/public/components/start/hooks/use_coding_examples.tsx b/x-pack/plugins/search_indices/public/components/start/hooks/use_coding_examples.tsx
new file mode 100644
index 0000000000000..1a351d10943f2
--- /dev/null
+++ b/x-pack/plugins/search_indices/public/components/start/hooks/use_coding_examples.tsx
@@ -0,0 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { CreateIndexCodeExamples } from '../../../types';
+import { DenseVectorSeverlessCodeExamples } from '../../../code_examples/create_index';
+
+export const useStartPageCodingExamples = (): CreateIndexCodeExamples => {
+ // TODO: in the future this will be dynamic based on the onboarding token
+ // or project sub-type
+ return DenseVectorSeverlessCodeExamples;
+};
diff --git a/x-pack/plugins/search_indices/public/components/start/types.ts b/x-pack/plugins/search_indices/public/components/start/types.ts
index 6b6c1c8e38f61..c0dbbeca88883 100644
--- a/x-pack/plugins/search_indices/public/components/start/types.ts
+++ b/x-pack/plugins/search_indices/public/components/start/types.ts
@@ -5,6 +5,9 @@
* 2.0.
*/
+import type { AvailableLanguages } from '../../code_examples';
+
export interface CreateIndexFormState {
indexName: string;
+ codingLanguage: AvailableLanguages;
}
diff --git a/x-pack/plugins/search_indices/public/plugin.ts b/x-pack/plugins/search_indices/public/plugin.ts
index 5ebfb84e1cd39..bec4f7cb7bfe6 100644
--- a/x-pack/plugins/search_indices/public/plugin.ts
+++ b/x-pack/plugins/search_indices/public/plugin.ts
@@ -7,6 +7,8 @@
import type { CoreSetup, CoreStart, Plugin } from '@kbn/core/public';
import { i18n } from '@kbn/i18n';
+
+import { docLinks } from '../common/doc_links';
import type {
SearchIndicesAppPluginStartDependencies,
SearchIndicesPluginSetup,
@@ -64,6 +66,7 @@ export class SearchIndicesPlugin
}
public start(core: CoreStart): SearchIndicesPluginStart {
+ docLinks.setDocLinks(core.docLinks.links);
return {};
}
diff --git a/x-pack/plugins/search_indices/public/types.ts b/x-pack/plugins/search_indices/public/types.ts
index 95f5eb2883d2e..a3e63df2642b3 100644
--- a/x-pack/plugins/search_indices/public/types.ts
+++ b/x-pack/plugins/search_indices/public/types.ts
@@ -77,6 +77,7 @@ export interface CreateIndexCodeDefinition {
}
export interface CreateIndexCodeExamples {
+ exampleType: string;
sense: CreateIndexCodeDefinition;
curl: CreateIndexCodeDefinition;
python: CreateIndexCodeDefinition;
diff --git a/x-pack/test_serverless/functional/page_objects/svl_search_elasticsearch_start_page.ts b/x-pack/test_serverless/functional/page_objects/svl_search_elasticsearch_start_page.ts
index 33dbc6f693ea8..798d396258e75 100644
--- a/x-pack/test_serverless/functional/page_objects/svl_search_elasticsearch_start_page.ts
+++ b/x-pack/test_serverless/functional/page_objects/svl_search_elasticsearch_start_page.ts
@@ -30,6 +30,11 @@ export function SvlSearchElasticsearchStartPageProvider({ getService }: FtrProvi
);
});
},
+ async expectToBeOnMLFileUploadPage() {
+ await retry.tryForTime(60 * 1000, async () => {
+ expect(await browser.getCurrentUrl()).contain('/app/ml/filedatavisualizer');
+ });
+ },
async expectIndexNameToExist() {
await testSubjects.existOrFail('indexNameField');
},
@@ -67,5 +72,23 @@ export function SvlSearchElasticsearchStartPageProvider({ getService }: FtrProvi
await testSubjects.existOrFail('createIndexCodeViewBtn');
await testSubjects.click('createIndexCodeViewBtn');
},
+ async clickFileUploadLink() {
+ await testSubjects.existOrFail('uploadFileLink');
+ await testSubjects.click('uploadFileLink');
+ },
+ async expectAnalyzeLogsLink() {
+ await testSubjects.existOrFail('analyzeLogsBtn');
+ expect(await testSubjects.getAttribute('analyzeLogsBtn', 'href')).equal(
+ 'https://docs.elastic.co/serverless/elasticsearch/ingest-your-data'
+ );
+ expect(await testSubjects.getAttribute('analyzeLogsBtn', 'target')).equal('_blank');
+ },
+ async expectO11yTrialLink() {
+ await testSubjects.existOrFail('startO11yTrialBtn');
+ expect(await testSubjects.getAttribute('startO11yTrialBtn', 'href')).equal(
+ 'https://fake-cloud.elastic.co/projects/create/observability/start'
+ );
+ expect(await testSubjects.getAttribute('startO11yTrialBtn', 'target')).equal('_blank');
+ },
};
}
diff --git a/x-pack/test_serverless/functional/test_suites/search/config.feature_flags.ts b/x-pack/test_serverless/functional/test_suites/search/config.feature_flags.ts
index 05eb6136bf008..824d145282257 100644
--- a/x-pack/test_serverless/functional/test_suites/search/config.feature_flags.ts
+++ b/x-pack/test_serverless/functional/test_suites/search/config.feature_flags.ts
@@ -19,10 +19,11 @@ export default createTestConfig({
suiteTags: { exclude: ['skipSvlSearch'] },
// add feature flags
kbnServerArgs: [
- `--xpack.cloud.id='ES3_FTR_TESTS:ZmFrZS1kb21haW4uY2xkLmVsc3RjLmNvJGZha2Vwcm9qZWN0aWQuZXMkZmFrZXByb2plY3RpZC5rYg=='`,
- `--xpack.cloud.serverless.project_id='fakeprojectid'`,
- `--xpack.cloud.base_url='https://cloud.elastic.co'`,
- `--xpack.cloud.organization_url='/account/members'`,
+ `--xpack.cloud.id=ES3_FTR_TESTS:ZmFrZS1kb21haW4uY2xkLmVsc3RjLmNvJGZha2Vwcm9qZWN0aWQuZXMkZmFrZXByb2plY3RpZC5rYg==`,
+ `--xpack.cloud.serverless.project_id=fakeprojectid`,
+ `--xpack.cloud.base_url=https://fake-cloud.elastic.co`,
+ `--xpack.cloud.projects_url=/projects/`,
+ `--xpack.cloud.organization_url=/account/members`,
`--xpack.security.roleManagementEnabled=true`,
`--xpack.spaces.maxSpaces=100`, // enables spaces UI capabilities
`--xpack.searchIndices.enabled=true`, // global empty state FF
diff --git a/x-pack/test_serverless/functional/test_suites/search/config.ts b/x-pack/test_serverless/functional/test_suites/search/config.ts
index 6853e75d987b8..4739cde53bf86 100644
--- a/x-pack/test_serverless/functional/test_suites/search/config.ts
+++ b/x-pack/test_serverless/functional/test_suites/search/config.ts
@@ -19,7 +19,9 @@ export default createTestConfig({
// https://github.com/elastic/project-controller/blob/main/internal/project/esproject/config/elasticsearch.yml
esServerArgs: [],
kbnServerArgs: [
- `--xpack.cloud.id='ES3_FTR_TESTS:ZmFrZS1kb21haW4uY2xkLmVsc3RjLmNvJGZha2Vwcm9qZWN0aWQuZXMkZmFrZXByb2plY3RpZC5rYg=='`,
- `--xpack.cloud.serverless.project_id='fakeprojectid'`,
+ `--xpack.cloud.id=ES3_FTR_TESTS:ZmFrZS1kb21haW4uY2xkLmVsc3RjLmNvJGZha2Vwcm9qZWN0aWQuZXMkZmFrZXByb2plY3RpZC5rYg==`,
+ `--xpack.cloud.serverless.project_id=fakeprojectid`,
+ `--xpack.cloud.base_url=https://fake-cloud.elastic.co`,
+ `--xpack.cloud.projects_url=/projects/`,
],
});
diff --git a/x-pack/test_serverless/functional/test_suites/search/elasticsearch_start.ts b/x-pack/test_serverless/functional/test_suites/search/elasticsearch_start.ts
index 55f1551141e47..f6362a409658e 100644
--- a/x-pack/test_serverless/functional/test_suites/search/elasticsearch_start.ts
+++ b/x-pack/test_serverless/functional/test_suites/search/elasticsearch_start.ts
@@ -81,6 +81,18 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) {
await pageObjects.svlSearchElasticsearchStartPage.clickUIViewButton();
await pageObjects.svlSearchElasticsearchStartPage.expectCreateIndexUIView();
});
+
+ it('should have file upload link', async () => {
+ await pageObjects.svlSearchElasticsearchStartPage.expectToBeOnStartPage();
+ await pageObjects.svlSearchElasticsearchStartPage.clickFileUploadLink();
+ await pageObjects.svlSearchElasticsearchStartPage.expectToBeOnMLFileUploadPage();
+ });
+
+ it('should have o11y links', async () => {
+ await pageObjects.svlSearchElasticsearchStartPage.expectToBeOnStartPage();
+ await pageObjects.svlSearchElasticsearchStartPage.expectAnalyzeLogsLink();
+ await pageObjects.svlSearchElasticsearchStartPage.expectO11yTrialLink();
+ });
});
describe('viewer', function () {
before(async () => {
From c362ab8b920a0c34582ab046e8a3925e7a2d42fb Mon Sep 17 00:00:00 2001
From: Steph Milovic
Date: Mon, 30 Sep 2024 11:59:14 -0600
Subject: [PATCH 028/107] [Security solution] Update ebt fields script to new
Vault policy (#194458)
---
x-pack/plugins/security_solution/scripts/telemetry/README.md | 2 +-
.../security_solution/scripts/telemetry/build_ebt_data_view.ts | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/x-pack/plugins/security_solution/scripts/telemetry/README.md b/x-pack/plugins/security_solution/scripts/telemetry/README.md
index ea17f9b4a140c..f1cfa92dd731c 100644
--- a/x-pack/plugins/security_solution/scripts/telemetry/README.md
+++ b/x-pack/plugins/security_solution/scripts/telemetry/README.md
@@ -13,7 +13,7 @@ If you have further events to be included in the data views, please update the s
### Usage
-1. Login with Vault (`vault login -method github`), ensure you have siem-team access. If you have never accessed Vault before, follow [these instructions](https://github.com/elastic/infra/blob/master/docs/vault/README.md)
+1. Login with Vault (`vault login -method oidc`), ensure you have siem-team access. If you have never accessed Vault before, follow [these instructions](https://github.com/elastic/infra/blob/master/docs/vault/README.md)
2. cd into this directory
3. Run the script with the appropriate arguments. By default, the script will run for the `security-solution-ebt-kibana-browser` data view in the `securitysolution` space. If you want to run the script for the server data view, pass the `--telemetry_type` argument with the value `server`.
diff --git a/x-pack/plugins/security_solution/scripts/telemetry/build_ebt_data_view.ts b/x-pack/plugins/security_solution/scripts/telemetry/build_ebt_data_view.ts
index e17b72d195c6c..9cf8fe842a9e9 100755
--- a/x-pack/plugins/security_solution/scripts/telemetry/build_ebt_data_view.ts
+++ b/x-pack/plugins/security_solution/scripts/telemetry/build_ebt_data_view.ts
@@ -11,7 +11,7 @@ import { events as genAiEvents } from '@kbn/elastic-assistant-plugin/server/lib/
import { events as securityEvents } from '../../server/lib/telemetry/event_based/events';
import { telemetryEvents } from '../../public/common/lib/telemetry/events/telemetry_events';
-// uncomment and add to run script, but do not commit as creates cirular dependency
+// uncomment and add to run script, but do not commit as creates circular dependency
// import { telemetryEvents as serverlessEvents } from '@kbn/security-solution-serverless/server/telemetry/event_based_telemetry';
const logger = new ToolingLog({
From 5f83ac05991cd980ef5b205acd19c997b60045a3 Mon Sep 17 00:00:00 2001
From: Marshall Main <55718608+marshallmain@users.noreply.github.com>
Date: Mon, 30 Sep 2024 11:07:39 -0700
Subject: [PATCH 029/107] [Security Solution][Detection Engine] Avoid creating
list items for empty lines in import list API (#192681)
## Summary
The quickstart tooling introduced in
https://github.com/elastic/kibana/pull/190634 uses axios under the hood
to make requests to Kibana. When attaching file data to the axios
request with `FormData`, axios adds an extra empty line after the end
content boundary. The logic in `buffer_lines.ts` assumes that there are
no more lines after the end content boundary line, so importing a list
with the quickstart tooling would create a list with an extra empty
item. This empty item fails validation when retrieved through other
APIs.
This PR prevents lines after the end content boundary from being turned
into list items in the import list API.
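Roughly, the request body built by axios looks like the sketch below,
with an extra empty line after the end boundary; previously that line
was pushed as an (empty) list item. The loop is a simplified stand-in
for the `BufferLines` boundary handling, not the actual implementation:

```typescript
// Lines as they arrive from the axios/FormData request body (simplified).
const multipartLines = [
  '--boundary',
  'Content-type: text/plain',
  'Content-Disposition: form-data; name="fieldName"; filename="filename.text"',
  '',
  '127.0.0.1',
  '127.0.0.2',
  '--boundary--',
  '', // extra empty line appended by axios after the end boundary
];

// Only emit content while inside the file part, and ignore everything
// after the end boundary -- the behavior this PR enforces.
let insideContent = false;
let pastEndBoundary = false;
const items: string[] = [];
for (const line of multipartLines) {
  if (pastEndBoundary) continue;
  if (line === '--boundary--') {
    pastEndBoundary = true;
  } else if (line === '--boundary') {
    insideContent = false; // part headers follow
  } else if (!insideContent && line === '') {
    insideContent = true; // blank line terminates the part headers
  } else if (insideContent && line !== '') {
    items.push(line);
  }
}
// items => ['127.0.0.1', '127.0.0.2'] -- no trailing empty item
```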
---
.../services/items/buffer_lines.test.ts | 92 +++++++------------
.../server/services/items/buffer_lines.ts | 2 +-
.../write_lines_to_bulk_list_items.test.ts | 12 +++
3 files changed, 45 insertions(+), 61 deletions(-)
diff --git a/x-pack/plugins/lists/server/services/items/buffer_lines.test.ts b/x-pack/plugins/lists/server/services/items/buffer_lines.test.ts
index 509233d006b73..7faef72c38907 100644
--- a/x-pack/plugins/lists/server/services/items/buffer_lines.test.ts
+++ b/x-pack/plugins/lists/server/services/items/buffer_lines.test.ts
@@ -23,9 +23,15 @@ describe('buffer_lines', () => {
}).toThrow('bufferSize must be greater than zero');
});
- test('it can read a single line', (done) => {
+ test('two identical lines are collapsed into just one line without duplicates', (done) => {
const input = new TestReadable();
+ input.push('--boundary\n');
+ input.push('Content-type: text/plain\n');
+ input.push('Content-Disposition: form-data; name="fieldName"; filename="filename.text"\n');
+ input.push('\n');
+ input.push('line one\n');
input.push('line one\n');
+ input.push('--boundary--\n');
input.push(null);
const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
let linesToTest: string[] = [];
@@ -38,25 +44,8 @@ describe('buffer_lines', () => {
});
});
- test('it can read a single line using a buffer size of 1', (done) => {
+ test('it can close out without writing any lines', (done) => {
const input = new TestReadable();
- input.push('line one\n');
- input.push(null);
- const bufferedLine = new BufferLines({ bufferSize: 1, input });
- let linesToTest: string[] = [];
- bufferedLine.on('lines', (lines: string[]) => {
- linesToTest = [...linesToTest, ...lines];
- });
- bufferedLine.on('close', () => {
- expect(linesToTest).toEqual(['line one']);
- done();
- });
- });
-
- test('it can read two lines', (done) => {
- const input = new TestReadable();
- input.push('line one\n');
- input.push('line two\n');
input.push(null);
const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
let linesToTest: string[] = [];
@@ -64,74 +53,56 @@ describe('buffer_lines', () => {
linesToTest = [...linesToTest, ...lines];
});
bufferedLine.on('close', () => {
- expect(linesToTest).toEqual(['line one', 'line two']);
- done();
- });
- });
-
- test('it can read two lines using a buffer size of 1', (done) => {
- const input = new TestReadable();
- input.push('line one\n');
- input.push('line two\n');
- input.push(null);
- const bufferedLine = new BufferLines({ bufferSize: 1, input });
- let linesToTest: string[] = [];
- bufferedLine.on('lines', (lines: string[]) => {
- linesToTest = [...linesToTest, ...lines];
- });
- bufferedLine.on('close', () => {
- expect(linesToTest).toEqual(['line one', 'line two']);
+ expect(linesToTest).toEqual([]);
done();
});
});
- test('two identical lines are collapsed into just one line without duplicates', (done) => {
+ test('it can read 200 lines', (done) => {
const input = new TestReadable();
- input.push('line one\n');
- input.push('line one\n');
- input.push(null);
const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
+ input.push('--boundary\n');
+ input.push('Content-type: text/plain\n');
+ input.push('Content-Disposition: form-data; name="fieldName"; filename="filename.text"\n');
+ input.push('\n');
let linesToTest: string[] = [];
- bufferedLine.on('lines', (lines: string[]) => {
- linesToTest = [...linesToTest, ...lines];
- });
- bufferedLine.on('close', () => {
- expect(linesToTest).toEqual(['line one']);
- done();
- });
- });
-
- test('it can close out without writing any lines', (done) => {
- const input = new TestReadable();
+ const size200: string[] = new Array(200).fill(null).map((_, index) => `${index}\n`);
+ size200.forEach((element) => input.push(element));
+ input.push('--boundary--\n');
input.push(null);
- const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
- let linesToTest: string[] = [];
bufferedLine.on('lines', (lines: string[]) => {
linesToTest = [...linesToTest, ...lines];
});
bufferedLine.on('close', () => {
- expect(linesToTest).toEqual([]);
+ expect(linesToTest.length).toEqual(200);
done();
});
});
- test('it can read 200 lines', (done) => {
+ test('it can read an example multi-part message', (done) => {
const input = new TestReadable();
+ input.push('--boundary\n');
+ input.push('Content-type: text/plain\n');
+ input.push('Content-Disposition: form-data; name="fieldName"; filename="filename.text"\n');
+ input.push('\n');
+ input.push('127.0.0.1\n');
+ input.push('127.0.0.2\n');
+ input.push('127.0.0.3\n');
+ input.push('\n');
+ input.push('--boundary--\n');
+ input.push(null);
const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
let linesToTest: string[] = [];
- const size200: string[] = new Array(200).fill(null).map((_, index) => `${index}\n`);
- size200.forEach((element) => input.push(element));
- input.push(null);
bufferedLine.on('lines', (lines: string[]) => {
linesToTest = [...linesToTest, ...lines];
});
bufferedLine.on('close', () => {
- expect(linesToTest.length).toEqual(200);
+ expect(linesToTest).toEqual(['127.0.0.1', '127.0.0.2', '127.0.0.3']);
done();
});
});
- test('it can read an example multi-part message', (done) => {
+ test('it does not create empty values for lines after the end boundary', (done) => {
const input = new TestReadable();
input.push('--boundary\n');
input.push('Content-type: text/plain\n');
@@ -142,6 +113,7 @@ describe('buffer_lines', () => {
input.push('127.0.0.3\n');
input.push('\n');
input.push('--boundary--\n');
+ input.push('\n');
input.push(null);
const bufferedLine = new BufferLines({ bufferSize: IMPORT_BUFFER_SIZE, input });
let linesToTest: string[] = [];
diff --git a/x-pack/plugins/lists/server/services/items/buffer_lines.ts b/x-pack/plugins/lists/server/services/items/buffer_lines.ts
index cb6d073010127..e179fc4358468 100644
--- a/x-pack/plugins/lists/server/services/items/buffer_lines.ts
+++ b/x-pack/plugins/lists/server/services/items/buffer_lines.ts
@@ -48,7 +48,7 @@ export class BufferLines extends Readable {
// we are at the end of the stream
this.boundary = null;
this.readableText = false;
- } else {
+ } else if (this.readableText) {
// we have actual content to push
this.push(line);
}
diff --git a/x-pack/plugins/lists/server/services/items/write_lines_to_bulk_list_items.test.ts b/x-pack/plugins/lists/server/services/items/write_lines_to_bulk_list_items.test.ts
index 78098fde59827..89c3d89fe631e 100644
--- a/x-pack/plugins/lists/server/services/items/write_lines_to_bulk_list_items.test.ts
+++ b/x-pack/plugins/lists/server/services/items/write_lines_to_bulk_list_items.test.ts
@@ -49,6 +49,12 @@ describe('write_lines_to_bulk_list_items', () => {
test('It imports a set of items to a write buffer by calling "getListItemByValues" with a single value given', async () => {
const options = getImportListItemsToStreamOptionsMock();
const promise = importListItemsToStream(options);
+ options.stream.push('--boundary\n');
+ options.stream.push('Content-type: text/plain\n');
+ options.stream.push(
+ 'Content-Disposition: form-data; name="fieldName"; filename="filename.text"\n'
+ );
+ options.stream.push('\n');
options.stream.push('127.0.0.1\n');
options.stream.push(null);
await promise;
@@ -58,6 +64,12 @@ describe('write_lines_to_bulk_list_items', () => {
test('It imports a set of items to a write buffer by calling "getListItemByValues" with two values given', async () => {
const options = getImportListItemsToStreamOptionsMock();
const promise = importListItemsToStream(options);
+ options.stream.push('--boundary\n');
+ options.stream.push('Content-type: text/plain\n');
+ options.stream.push(
+ 'Content-Disposition: form-data; name="fieldName"; filename="filename.text"\n'
+ );
+ options.stream.push('\n');
options.stream.push('127.0.0.1\n');
options.stream.push('127.0.0.2\n');
options.stream.push(null);
From d922ee1f8c1051633d58b34b5f272619687786de Mon Sep 17 00:00:00 2001
From: Toby Brain
Date: Tue, 1 Oct 2024 04:14:11 +1000
Subject: [PATCH 030/107] Support global_data_tags in the policy update request
(#194421)
## Summary
https://github.com/elastic/kibana/pull/183563 adds support for
`global_data_tags`; however, the field is not added to the update
request definition in the OpenAPI spec. This PR defines the field within
the API spec.
Related to
https://github.com/elastic/terraform-provider-elasticstack/pull/730
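For context, an update request exercising the newly documented field
might look like the following (a hypothetical payload; the tag entries
are illustrative only, and each tag object maps keys to string or
number values as described by the schema below):

```typescript
// Hypothetical agent policy update body including global_data_tags.
const agentPolicyUpdateBody = {
  name: 'My agent policy',
  namespace: 'default',
  global_data_tags: [
    { name: 'team', value: 'platform' }, // string value
    { name: 'cost_center', value: 42 }, // number value
  ],
};

// e.g. PUT /api/fleet/agent_policies/{agentPolicyId} with this JSON body
```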
### Checklist
Delete any items that are not applicable to this PR.
- [x] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [x]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [x] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [x] Any UI touched in this PR is usable by keyboard only (learn more
about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [x] Any UI touched in this PR does not create any new axe failures
(run axe in browser:
[FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/),
[Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [x] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [x] This renders correctly on smaller devices using a responsive
layout. (You can test this [in your
browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [x] This was checked for [cross-browser
compatibility](https://www.elastic.co/support/matrix#matrix_browsers)
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
---
.../output/kibana.serverless.staging.yaml | 11 +++++++++++
oas_docs/output/kibana.serverless.yaml | 11 +++++++++++
oas_docs/output/kibana.staging.yaml | 11 +++++++++++
oas_docs/output/kibana.yaml | 11 +++++++++++
.../plugins/fleet/common/openapi/bundled.json | 19 ++++++++++++++++++-
.../plugins/fleet/common/openapi/bundled.yaml | 11 +++++++++++
.../schemas/agent_policy_update_request.yaml | 9 +++++++++
7 files changed, 82 insertions(+), 1 deletion(-)
diff --git a/oas_docs/output/kibana.serverless.staging.yaml b/oas_docs/output/kibana.serverless.staging.yaml
index a5d53bd71cc83..f4ed7a9767924 100644
--- a/oas_docs/output/kibana.serverless.staging.yaml
+++ b/oas_docs/output/kibana.serverless.staging.yaml
@@ -19977,6 +19977,17 @@ components:
force:
description: Force agent policy creation even if packages are not verified.
type: boolean
+ global_data_tags:
+ items:
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: >-
+ User defined data tags that are added to all of the inputs. The
+ values can be strings or numbers.
+ type: object
+ type: array
inactivity_timeout:
type: integer
is_protected:
diff --git a/oas_docs/output/kibana.serverless.yaml b/oas_docs/output/kibana.serverless.yaml
index 93c3a5533c8a0..1b4209e9eec0a 100644
--- a/oas_docs/output/kibana.serverless.yaml
+++ b/oas_docs/output/kibana.serverless.yaml
@@ -12821,6 +12821,17 @@ components:
force:
description: Force agent policy creation even if packages are not verified.
type: boolean
+ global_data_tags:
+ items:
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: >-
+ User defined data tags that are added to all of the inputs. The
+ values can be strings or numbers.
+ type: object
+ type: array
inactivity_timeout:
type: integer
is_protected:
diff --git a/oas_docs/output/kibana.staging.yaml b/oas_docs/output/kibana.staging.yaml
index 96352fc0cd962..618bd42ab1f72 100644
--- a/oas_docs/output/kibana.staging.yaml
+++ b/oas_docs/output/kibana.staging.yaml
@@ -27753,6 +27753,17 @@ components:
force:
description: Force agent policy creation even if packages are not verified.
type: boolean
+ global_data_tags:
+ items:
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: >-
+ User defined data tags that are added to all of the inputs. The
+ values can be strings or numbers.
+ type: object
+ type: array
inactivity_timeout:
type: integer
is_protected:
diff --git a/oas_docs/output/kibana.yaml b/oas_docs/output/kibana.yaml
index 6d53cb1a38bdd..35a446f538a6a 100644
--- a/oas_docs/output/kibana.yaml
+++ b/oas_docs/output/kibana.yaml
@@ -19780,6 +19780,17 @@ components:
force:
description: Force agent policy creation even if packages are not verified.
type: boolean
+ global_data_tags:
+ items:
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: >-
+ User defined data tags that are added to all of the inputs. The
+ values can be strings or numbers.
+ type: object
+ type: array
inactivity_timeout:
type: integer
is_protected:
diff --git a/x-pack/plugins/fleet/common/openapi/bundled.json b/x-pack/plugins/fleet/common/openapi/bundled.json
index 2d74305ad3bd5..7ddd44baacf2d 100644
--- a/x-pack/plugins/fleet/common/openapi/bundled.json
+++ b/x-pack/plugins/fleet/common/openapi/bundled.json
@@ -7812,6 +7812,23 @@
"force": {
"type": "boolean",
"description": "Force agent policy creation even if packages are not verified."
+ },
+ "global_data_tags": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ }
+ ]
+ },
+ "description": "User defined data tags that are added to all of the inputs. The values can be strings or numbers."
+ }
}
},
"required": [
@@ -9563,4 +9580,4 @@
"basicAuth": []
}
]
-}
+}
\ No newline at end of file
diff --git a/x-pack/plugins/fleet/common/openapi/bundled.yaml b/x-pack/plugins/fleet/common/openapi/bundled.yaml
index c36758e8a4432..d60963068b8e5 100644
--- a/x-pack/plugins/fleet/common/openapi/bundled.yaml
+++ b/x-pack/plugins/fleet/common/openapi/bundled.yaml
@@ -5011,6 +5011,17 @@ components:
force:
type: boolean
description: Force agent policy creation even if packages are not verified.
+ global_data_tags:
+ type: array
+ items:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: >-
+ User defined data tags that are added to all of the inputs. The
+ values can be strings or numbers.
required:
- name
- namespace
diff --git a/x-pack/plugins/fleet/common/openapi/components/schemas/agent_policy_update_request.yaml b/x-pack/plugins/fleet/common/openapi/components/schemas/agent_policy_update_request.yaml
index 7fb5581aa79e4..1d1dbd45037ae 100644
--- a/x-pack/plugins/fleet/common/openapi/components/schemas/agent_policy_update_request.yaml
+++ b/x-pack/plugins/fleet/common/openapi/components/schemas/agent_policy_update_request.yaml
@@ -47,6 +47,15 @@ properties:
force:
type: boolean
description: Force agent policy creation even if packages are not verified.
+ global_data_tags:
+ type: array
+ items:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: string
+ - type: number
+ description: User defined data tags that are added to all of the inputs. The values can be strings or numbers.
required:
- name
- namespace
From 866adf37f13c79c06fe426fa822231940132de03 Mon Sep 17 00:00:00 2001
From: Cee Chen <549407+cee-chen@users.noreply.github.com>
Date: Mon, 30 Sep 2024 11:37:47 -0700
Subject: [PATCH 031/107] Delete imports/references to EUI's distributed `.css`
files (#194237)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## Summary
Trying #194082 again, this time wholly deleting
`kbn-ui-shared-deps-npm.v8.light/dark.css` as well 🤞
Original PR description:
> These files no longer contain any meaningful CSS used within Kibana as
of EUI's completed Emotion migration, and can be safely removed. EUI
will shortly no longer distribute these static `.css` files (although
`.scss` src files will still remain exported for the near future).
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
---
.../src/render_utils.test.ts | 36 ++++++++-----------
.../src/render_utils.ts | 18 ++--------
.../src/rendering_service.test.ts | 9 -----
.../src/rendering_service.tsx | 1 -
packages/kbn-storybook/templates/index.ejs | 1 -
packages/kbn-ui-shared-deps-npm/index.js | 24 -------------
.../kbn-ui-shared-deps-npm/webpack.config.js | 2 --
.../plugins/canvas/shareable_runtime/index.ts | 1 -
.../plugins/canvas/storybook/addon/panel.tsx | 1 -
.../__snapshots__/prompt_page.test.tsx.snap | 4 +--
.../unauthenticated_page.test.tsx.snap | 4 +--
.../reset_session_page.test.tsx.snap | 4 +--
.../plugins/security/server/prompt_page.tsx | 2 --
x-pack/plugins/security/tsconfig.json | 1 -
14 files changed, 22 insertions(+), 86 deletions(-)
diff --git a/packages/core/rendering/core-rendering-server-internal/src/render_utils.test.ts b/packages/core/rendering/core-rendering-server-internal/src/render_utils.test.ts
index 85b3186a57c80..5cdf7ceb4ba39 100644
--- a/packages/core/rendering/core-rendering-server-internal/src/render_utils.test.ts
+++ b/packages/core/rendering/core-rendering-server-internal/src/render_utils.test.ts
@@ -55,39 +55,31 @@ describe('getCommonStylesheetPaths', () => {
describe('getStylesheetPaths', () => {
describe('when darkMode is `true`', () => {
- describe('when themeVersion is `v8`', () => {
- it('returns the correct list', () => {
- expect(
- getThemeStylesheetPaths({
- darkMode: true,
- themeVersion: 'v8',
- baseHref: '/base-path/buildShaShort',
- })
- ).toMatchInlineSnapshot(`
+ it('returns the correct list', () => {
+ expect(
+ getThemeStylesheetPaths({
+ darkMode: true,
+ baseHref: '/base-path/buildShaShort',
+ })
+ ).toMatchInlineSnapshot(`
Array [
- "/base-path/buildShaShort/bundles/kbn-ui-shared-deps-npm/kbn-ui-shared-deps-npm.v8.dark.css",
"/base-path/buildShaShort/ui/legacy_dark_theme.min.css",
]
`);
- });
});
});
describe('when darkMode is `false`', () => {
- describe('when themeVersion is `v8`', () => {
- it('returns the correct list', () => {
- expect(
- getThemeStylesheetPaths({
- darkMode: false,
- themeVersion: 'v8',
- baseHref: '/base-path/buildShaShort',
- })
- ).toMatchInlineSnapshot(`
+ it('returns the correct list', () => {
+ expect(
+ getThemeStylesheetPaths({
+ darkMode: false,
+ baseHref: '/base-path/buildShaShort',
+ })
+ ).toMatchInlineSnapshot(`
Array [
- "/base-path/buildShaShort/bundles/kbn-ui-shared-deps-npm/kbn-ui-shared-deps-npm.v8.light.css",
"/base-path/buildShaShort/ui/legacy_light_theme.min.css",
]
`);
- });
});
});
});
diff --git a/packages/core/rendering/core-rendering-server-internal/src/render_utils.ts b/packages/core/rendering/core-rendering-server-internal/src/render_utils.ts
index dd8782e67edff..776e385906d7f 100644
--- a/packages/core/rendering/core-rendering-server-internal/src/render_utils.ts
+++ b/packages/core/rendering/core-rendering-server-internal/src/render_utils.ts
@@ -8,7 +8,6 @@
*/
import { firstValueFrom } from 'rxjs';
-import UiSharedDepsNpm from '@kbn/ui-shared-deps-npm';
import * as UiSharedDepsSrc from '@kbn/ui-shared-deps-src';
import type { IConfigService } from '@kbn/config';
import type { BrowserLoggingConfig } from '@kbn/core-logging-common-internal';
@@ -57,28 +56,15 @@ export const getCommonStylesheetPaths = ({ baseHref }: { baseHref: string }) =>
export const getThemeStylesheetPaths = ({
darkMode,
- themeVersion,
baseHref,
}: {
darkMode: boolean;
- themeVersion: UiSharedDepsNpm.ThemeVersion;
baseHref: string;
}) => {
- const bundlesHref = getBundlesHref(baseHref);
return [
...(darkMode
- ? [
- `${bundlesHref}/kbn-ui-shared-deps-npm/${UiSharedDepsNpm.darkCssDistFilename(
- themeVersion
- )}`,
- `${baseHref}/ui/legacy_dark_theme.min.css`,
- ]
- : [
- `${bundlesHref}/kbn-ui-shared-deps-npm/${UiSharedDepsNpm.lightCssDistFilename(
- themeVersion
- )}`,
- `${baseHref}/ui/legacy_light_theme.min.css`,
- ]),
+ ? [`${baseHref}/ui/legacy_dark_theme.min.css`]
+ : [`${baseHref}/ui/legacy_light_theme.min.css`]),
];
};
diff --git a/packages/core/rendering/core-rendering-server-internal/src/rendering_service.test.ts b/packages/core/rendering/core-rendering-server-internal/src/rendering_service.test.ts
index b22697a494788..7f7f2f504411d 100644
--- a/packages/core/rendering/core-rendering-server-internal/src/rendering_service.test.ts
+++ b/packages/core/rendering/core-rendering-server-internal/src/rendering_service.test.ts
@@ -224,12 +224,10 @@ function renderTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledTimes(2);
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: true,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: false,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -380,7 +378,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: true,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -405,7 +402,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: false,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -428,7 +424,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: false,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -451,7 +446,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: true,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -474,7 +468,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: false,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -497,7 +490,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: false,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
@@ -520,7 +512,6 @@ function renderDarkModeTestCases(
expect(getThemeStylesheetPathsMock).toHaveBeenCalledWith({
darkMode: true,
- themeVersion: 'v8',
baseHref: '/mock-server-basepath',
});
});
diff --git a/packages/core/rendering/core-rendering-server-internal/src/rendering_service.tsx b/packages/core/rendering/core-rendering-server-internal/src/rendering_service.tsx
index a696328475853..44841ec0fbe3f 100644
--- a/packages/core/rendering/core-rendering-server-internal/src/rendering_service.tsx
+++ b/packages/core/rendering/core-rendering-server-internal/src/rendering_service.tsx
@@ -212,7 +212,6 @@ export class RenderingService {
const themeStylesheetPaths = (mode: boolean) =>
getThemeStylesheetPaths({
darkMode: mode,
- themeVersion,
baseHref: staticAssetsHrefBase,
});
const commonStylesheetPaths = getCommonStylesheetPaths({
diff --git a/packages/kbn-storybook/templates/index.ejs b/packages/kbn-storybook/templates/index.ejs
index bf40dfb9fd3ca..776b495447cec 100644
--- a/packages/kbn-storybook/templates/index.ejs
+++ b/packages/kbn-storybook/templates/index.ejs
@@ -27,7 +27,6 @@
-
<% if (typeof headHtmlSnippet !=='undefined' ) { %>
diff --git a/packages/kbn-ui-shared-deps-npm/index.js b/packages/kbn-ui-shared-deps-npm/index.js
index b8be150acc311..73aaf151e8f0f 100644
--- a/packages/kbn-ui-shared-deps-npm/index.js
+++ b/packages/kbn-ui-shared-deps-npm/index.js
@@ -44,28 +44,4 @@ module.exports = {
* Webpack loader for configuring the public path lookup from `window.__kbnPublicPath__`.
*/
publicPathLoader,
-
- /**
- * Filename of the light-theme css file in the distributable directory
- * @param {ThemeVersion} themeVersion
- */
- lightCssDistFilename(themeVersion) {
- if (themeVersion !== 'v8') {
- throw new Error(`unsupported theme version [${themeVersion}]`);
- }
-
- return 'kbn-ui-shared-deps-npm.v8.light.css';
- },
-
- /**
- * Filename of the dark-theme css file in the distributable directory
- * @param {ThemeVersion} themeVersion
- */
- darkCssDistFilename(themeVersion) {
- if (themeVersion !== 'v8') {
- throw new Error(`unsupported theme version [${themeVersion}]`);
- }
-
- return 'kbn-ui-shared-deps-npm.v8.dark.css';
- },
};
diff --git a/packages/kbn-ui-shared-deps-npm/webpack.config.js b/packages/kbn-ui-shared-deps-npm/webpack.config.js
index 4f3971d293489..3b16430aeb724 100644
--- a/packages/kbn-ui-shared-deps-npm/webpack.config.js
+++ b/packages/kbn-ui-shared-deps-npm/webpack.config.js
@@ -101,8 +101,6 @@ module.exports = (_, argv) => {
'tslib',
'uuid',
],
- 'kbn-ui-shared-deps-npm.v8.dark': ['@elastic/eui/dist/eui_theme_dark.css'],
- 'kbn-ui-shared-deps-npm.v8.light': ['@elastic/eui/dist/eui_theme_light.css'],
},
context: __dirname,
devtool: 'cheap-source-map',
diff --git a/x-pack/plugins/canvas/shareable_runtime/index.ts b/x-pack/plugins/canvas/shareable_runtime/index.ts
index 475989494c574..ca7b1441d7bb1 100644
--- a/x-pack/plugins/canvas/shareable_runtime/index.ts
+++ b/x-pack/plugins/canvas/shareable_runtime/index.ts
@@ -8,4 +8,3 @@
export * from './api';
import '@kbn/core-apps-server-internal/assets/legacy_light_theme.css';
import '../public/style/index.scss';
-import '@elastic/eui/dist/eui_theme_light.css';
diff --git a/x-pack/plugins/canvas/storybook/addon/panel.tsx b/x-pack/plugins/canvas/storybook/addon/panel.tsx
index c799df73bd8ae..d9883ab7ed086 100644
--- a/x-pack/plugins/canvas/storybook/addon/panel.tsx
+++ b/x-pack/plugins/canvas/storybook/addon/panel.tsx
@@ -9,7 +9,6 @@ import React, { useState } from 'react';
import { EuiResizableContainer } from '@elastic/eui';
import { StateChange } from './components/state_change';
-import '@elastic/eui/dist/eui_theme_light.css';
import './panel.css';
import { RecordedAction } from './types';
diff --git a/x-pack/plugins/security/server/__snapshots__/prompt_page.test.tsx.snap b/x-pack/plugins/security/server/__snapshots__/prompt_page.test.tsx.snap
index 0556a61dbf47b..df1140ea44828 100644
--- a/x-pack/plugins/security/server/__snapshots__/prompt_page.test.tsx.snap
+++ b/x-pack/plugins/security/server/__snapshots__/prompt_page.test.tsx.snap
@@ -1,5 +1,5 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`PromptPage renders as expected with additional scripts 1`] = `"ElasticMockedFonts
Some Title
Some Body
Action#1
Action#2
"`;
+exports[`PromptPage renders as expected with additional scripts 1`] = `"ElasticMockedFonts
Some Title
Some Body
Action#1
Action#2
"`;
-exports[`PromptPage renders as expected without additional scripts 1`] = `"ElasticMockedFonts
Some Title
Some Body
Action#1
Action#2
"`;
+exports[`PromptPage renders as expected without additional scripts 1`] = `"ElasticMockedFonts
Some Title
Some Body
Action#1
Action#2
"`;
diff --git a/x-pack/plugins/security/server/authentication/__snapshots__/unauthenticated_page.test.tsx.snap b/x-pack/plugins/security/server/authentication/__snapshots__/unauthenticated_page.test.tsx.snap
index 127d02f38b08e..2466a01112102 100644
--- a/x-pack/plugins/security/server/authentication/__snapshots__/unauthenticated_page.test.tsx.snap
+++ b/x-pack/plugins/security/server/authentication/__snapshots__/unauthenticated_page.test.tsx.snap
@@ -1,5 +1,5 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`UnauthenticatedPage renders as expected 1`] = `"ElasticMockedFonts
We hit an authentication error
Try logging in again, and if the problem persists, contact your system administrator.