Skip to content

Commit

Permalink
[8.x] 🌊 Add failure store and exists conditions (#200861) (#202614)
Browse files Browse the repository at this point in the history
# Backport

This will backport the following commits from `main` to `8.x`:
- [🌊 Add failure store and exists conditions
(#200861)](#200861)

<!--- Backport version: 9.4.3 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Joe
Reuter","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-11-27T08:57:30Z","message":"🌊
Add failure store and exists conditions (#200861)\n\nThis PR enables
failure store for all streams-managed data streams and\r\nalso adds
exists and notExists operators to the painless
condition\r\nbuilder.\r\n\r\nWith this addition I think we have all the
important ones covered, we\r\ncan discuss whether we should add a
\"matches regex\" operator, but I'm\r\nnot sure whether it is a good
idea to do so.\r\n\r\nThis also fixes a problem with rollovers - in data
streams `@timestamp`\r\nis not allowed to use `ignore_malformed`, but
it's assuming it\r\nautomatically, so a rollover is always triggered.
This is fixed by\r\nsetting `ignore_malformed` to false explicitly for
the timestamp.\r\n\r\n---------\r\n\r\nCo-authored-by: Chris Cowan
<[email protected]>\r\nCo-authored-by: kibanamachine
<[email protected]>","sha":"6dba9263e089649b808e152b4cb54f01b4b52fca","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v9.0.0","backport:prev-minor","ci:project-deploy-observability","Team:obs-ux-logs","Team:obs-ux-infra_services","v8.18.0","Feature:Streams"],"title":"🌊
Add failure store and exists
conditions","number":200861,"url":"https://github.com/elastic/kibana/pull/200861","mergeCommit":{"message":"🌊
Add failure store and exists conditions (#200861)\n\nThis PR enables
failure store for all streams-managed data streams and\r\nalso adds
exists and notExists operators to the painless
condition\r\nbuilder.\r\n\r\nWith this addition I think we have all the
important ones covered, we\r\ncan discuss whether we should add a
\"matches regex\" operator, but I'm\r\nnot sure whether it is a good
idea to do so.\r\n\r\nThis also fixes a problem with rollovers - in data
streams `@timestamp`\r\nis not allowed to use `ignore_malformed`, but
it's assuming it\r\nautomatically, so a rollover is always triggered.
This is fixed by\r\nsetting `ignore_malformed` to false explicitly for
the timestamp.\r\n\r\n---------\r\n\r\nCo-authored-by: Chris Cowan
<[email protected]>\r\nCo-authored-by: kibanamachine
<[email protected]>","sha":"6dba9263e089649b808e152b4cb54f01b4b52fca"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/200861","number":200861,"mergeCommit":{"message":"🌊
Add failure store and exists conditions (#200861)\n\nThis PR enables
failure store for all streams-managed data streams and\r\nalso adds
exists and notExists operators to the painless
condition\r\nbuilder.\r\n\r\nWith this addition I think we have all the
important ones covered, we\r\ncan discuss whether we should add a
\"matches regex\" operator, but I'm\r\nnot sure whether it is a good
idea to do so.\r\n\r\nThis also fixes a problem with rollovers - in data
streams `@timestamp`\r\nis not allowed to use `ignore_malformed`, but
it's assuming it\r\nautomatically, so a rollover is always triggered.
This is fixed by\r\nsetting `ignore_malformed` to false explicitly for
the timestamp.\r\n\r\n---------\r\n\r\nCo-authored-by: Chris Cowan
<[email protected]>\r\nCo-authored-by: kibanamachine
<[email protected]>","sha":"6dba9263e089649b808e152b4cb54f01b4b52fca"}},{"branch":"8.x","label":"v8.18.0","branchLabelMappingKey":"^v8.18.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Joe Reuter <[email protected]>
  • Loading branch information
kibanamachine and flash1293 authored Dec 3, 2024
1 parent 68ef0c3 commit 8ce1638
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 4 deletions.
14 changes: 13 additions & 1 deletion x-pack/plugins/streams/common/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,25 @@ import { z } from '@kbn/zod';

// Primitive value types a binary filter may compare against: string | number | boolean.
const stringOrNumberOrBoolean = z.union([z.string(), z.number(), z.boolean()]);

export const filterConditionSchema = z.object({
// Two-operand filter: compares `field` against a concrete `value` with one of
// the comparison/string operators.
export const binaryConditionSchema = z.object({
field: z.string(),
operator: z.enum(['eq', 'neq', 'lt', 'lte', 'gt', 'gte', 'contains', 'startsWith', 'endsWith']),
value: stringOrNumberOrBoolean,
});

// One-operand filter: only checks presence/absence of `field`, so it
// deliberately has no `value` property.
export const unaryFilterConditionSchema = z.object({
field: z.string(),
operator: z.enum(['exists', 'notExists']),
});

// A filter condition is either unary or binary. Discriminating on `operator`
// lets zod select the correct branch from the operator literal alone (the two
// operator enums are disjoint).
export const filterConditionSchema = z.discriminatedUnion('operator', [
unaryFilterConditionSchema,
binaryConditionSchema,
]);

export type FilterCondition = z.infer<typeof filterConditionSchema>;
export type BinaryFilterCondition = z.infer<typeof binaryConditionSchema>;
export type UnaryFilterCondition = z.infer<typeof unaryFilterConditionSchema>;

export interface AndCondition {
and: Condition[];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

import {
ClusterPutComponentTemplateRequest,
MappingDateProperty,
MappingProperty,
} from '@elastic/elasticsearch/lib/api/types';
import { StreamDefinition } from '../../../../common/types';
Expand All @@ -21,9 +22,14 @@ export function generateLayer(
): ClusterPutComponentTemplateRequest {
const properties: Record<string, MappingProperty> = {};
definition.fields.forEach((field) => {
properties[field.name] = {
const property: MappingProperty = {
type: field.type,
};
if (field.name === '@timestamp') {
// @timestamp can't ignore malformed dates as it's used for sorting in logsdb
(property as MappingDateProperty).ignore_malformed = false;
}
properties[field.name] = property;
});
return {
name: getComponentTemplateName(id),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,14 @@ const operatorConditionAndResults = [
condition: { field: 'log.logger', operator: 'contains' as const, value: 'proxy' },
result: '(ctx.log?.logger !== null && ctx.log?.logger.contains("proxy"))',
},
{
condition: { field: 'log.logger', operator: 'exists' as const },
result: 'ctx.log?.logger !== null',
},
{
condition: { field: 'log.logger', operator: 'notExists' as const },
result: 'ctx.log?.logger == null',
},
];

describe('conditionToPainless', () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,13 @@
import { isBoolean, isString } from 'lodash';
import {
AndCondition,
BinaryFilterCondition,
Condition,
conditionSchema,
FilterCondition,
filterConditionSchema,
RerouteOrCondition,
UnaryFilterCondition,
} from '../../../../common/types';

function isFilterCondition(subject: any): subject is FilterCondition {
Expand Down Expand Up @@ -44,7 +46,7 @@ function encodeValue(value: string | number | boolean) {
return value;
}

function toPainless(condition: FilterCondition) {
function binaryToPainless(condition: BinaryFilterCondition) {
switch (condition.operator) {
case 'neq':
return `${safePainlessField(condition)} != ${encodeValue(condition.value)}`;
Expand All @@ -67,9 +69,25 @@ function toPainless(condition: FilterCondition) {
}
}

/**
 * Renders a unary (exists / notExists) filter condition as a painless expression.
 * `notExists` maps to a null check; any other unary operator (i.e. `exists`)
 * maps to a not-null check.
 */
function unaryToPainless(condition: UnaryFilterCondition) {
  const fieldAccess = safePainlessField(condition);
  return condition.operator === 'notExists'
    ? `${fieldAccess} == null`
    : `${fieldAccess} !== null`;
}

/**
 * Type guard separating unary (exists / notExists) filters from binary ones.
 * Unary conditions are the only filter variant without a `value` property.
 */
function isUnaryFilterCondition(subject: FilterCondition): subject is UnaryFilterCondition {
  const hasValue = 'value' in subject;
  return hasValue === false;
}

export function conditionToPainless(condition: Condition, nested = false): string {
if (isFilterCondition(condition)) {
return `(${safePainlessField(condition)} !== null && ${toPainless(condition)})`;
if (isUnaryFilterCondition(condition)) {
return unaryToPainless(condition);
}
return `(${safePainlessField(condition)} !== null && ${binaryToPainless(condition)})`;
}
if (isAndCondition(condition)) {
const and = condition.and.map((filter) => conditionToPainless(filter, true)).join(' && ');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export function generateIndexTemplate(id: string) {
},
data_stream: {
hidden: false,
failure_store: true,
},
template: {
settings: {
Expand Down

0 comments on commit 8ce1638

Please sign in to comment.