From 56bba0dd751eb6604c27be0ed8dd7cc62b2121dd Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Fri, 27 Jan 2023 19:46:45 +0100 Subject: [PATCH 01/18] Added integrations tests for checking workflow creation and update scenario Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 9 +- .../model/workflow/WorkflowRunResult.kt | 3 +- .../TransportIndexCompositeWorkflowAction.kt | 79 +++-- .../alerting/workflow/WorkflowRunner.kt | 1 - .../workflow/WorkflowRunnerService.kt | 2 - .../alerting/MonitorDataSourcesIT.kt | 1 - .../org/opensearch/alerting/TestHelpers.kt | 112 ++++-- .../opensearch/alerting/WorkflowMonitorIT.kt | 327 ++++++++++++++++++ .../transport/WorkflowSingleNodeTestCase.kt | 58 ++++ core/build.gradle | 2 +- 10 files changed, 521 insertions(+), 73 deletions(-) create mode 100644 alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt create mode 100644 alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 66ebd5eb3..61439ea70 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -51,6 +51,7 @@ import org.opensearch.alerting.transport.TransportGetEmailAccountAction import org.opensearch.alerting.transport.TransportGetEmailGroupAction import org.opensearch.alerting.transport.TransportGetFindingsSearchAction import org.opensearch.alerting.transport.TransportGetMonitorAction +import org.opensearch.alerting.transport.TransportIndexCompositeWorkflowAction import org.opensearch.alerting.transport.TransportIndexMonitorAction import org.opensearch.alerting.transport.TransportSearchEmailAccountAction import org.opensearch.alerting.transport.TransportSearchEmailGroupAction @@ -80,6 +81,7 @@ import 
org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Workflow import org.opensearch.env.Environment import org.opensearch.env.NodeEnvironment import org.opensearch.index.IndexModule @@ -180,8 +182,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(SearchEmailGroupAction.INSTANCE, TransportSearchEmailGroupAction::class.java), ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java) - + ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexCompositeWorkflowAction::class.java) ) } @@ -193,7 +195,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R QueryLevelTrigger.XCONTENT_REGISTRY, BucketLevelTrigger.XCONTENT_REGISTRY, ClusterMetricsInput.XCONTENT_REGISTRY, - DocumentLevelTrigger.XCONTENT_REGISTRY + DocumentLevelTrigger.XCONTENT_REGISTRY, + Workflow.XCONTENT_REGISTRY ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt index dc643e716..cc6b61745 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt @@ -1,4 +1,3 @@ package org.opensearch.alerting.model.workflow -data class 
WorkflowRunResult { -} +data class WorkflowRunResult(private val someArg: String) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt index a6273a91c..1ea721b39 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt @@ -166,7 +166,13 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( ) { fun resolveUserAndStart() { scope.launch { - validateRequest(request, actionListener) + try { + validateRequest(request) + } catch (e: Exception) { + actionListener.onFailure(e) + return@launch + } + if (user == null) { // Security is disabled, add empty user to Monitor. user is null for older versions. request.workflow = request.workflow @@ -456,68 +462,72 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( } } - suspend fun validateRequest(request: IndexWorkflowRequest, listener: ActionListener) { - val compositeInput = request.workflow.inputs.get(0) as CompositeInput + suspend fun validateRequest(request: IndexWorkflowRequest) { + val compositeInput = request.workflow.inputs[0] as CompositeInput val monitorIds = compositeInput.sequence.delegates.stream().map { it.monitorId }.collect(Collectors.toList()) - validateDuplicateDelegateMonitorReferenceExists(monitorIds, listener) - validateSequenceOrdering(compositeInput.sequence.delegates, listener) - validateChainedFindings(compositeInput.sequence.delegates, listener) - val delegateMonitors = getDelegateMonitors(monitorIds, listener) - validateDelegateMonitorsExist(monitorIds, delegateMonitors, listener) + + if (monitorIds.isNullOrEmpty()) + throw AlertingException.wrap(IllegalArgumentException("Delegates list can not be empty.")) + + 
validateDuplicateDelegateMonitorReferenceExists(monitorIds) + validateSequenceOrdering(compositeInput.sequence.delegates) + validateChainedFindings(compositeInput.sequence.delegates) + val delegateMonitors = getDelegateMonitors(monitorIds) + validateDelegateMonitorsExist(monitorIds, delegateMonitors) // todo: validate that user has roles to reference delegate monitors } - private fun validateChainedFindings(delegates: List, listener: ActionListener) { + private fun validateChainedFindings(delegates: List) { val monitorIdOrderMap: Map = delegates.associate { it.monitorId to it.order } delegates.forEach { if (it.chainedFindings != null) { if (monitorIdOrderMap.containsKey(it.chainedFindings!!.monitorId) == false) { - listener.onFailure(Exception("Chained Findings Monitor ${it.chainedFindings!!.monitorId} doesn't exist in sequence")) + throw AlertingException.wrap( + IllegalArgumentException( + "Chained Findings Monitor ${it.chainedFindings!!.monitorId} doesn't exist in sequence" + ) + ) } - if (it.order <= monitorIdOrderMap.get(it.chainedFindings!!.monitorId)!!) { - listener.onFailure( - Exception( + if (it.order <= monitorIdOrderMap[it.chainedFindings!!.monitorId]!!) 
+ throw AlertingException.wrap( + IllegalArgumentException( "Chained Findings Monitor ${it.chainedFindings!!.monitorId} should be executed before monitor ${it.monitorId}" ) ) - } } } } - private fun validateSequenceOrdering(delegates: List, listener: ActionListener) { + private fun validateSequenceOrdering(delegates: List) { val orderSet = delegates.stream().filter { it.order > 0 }.map { it.order }.collect(Collectors.toSet()) if (orderSet.size != delegates.size) { - listener.onFailure(Exception("Sequence ordering of delegate monitor shouldn't contain duplicate order values")) + throw AlertingException.wrap(IllegalArgumentException("Sequence ordering of delegate monitor shouldn't contain duplicate order values")) } } private fun validateDuplicateDelegateMonitorReferenceExists( - monitorIds: MutableList, - listener: ActionListener + monitorIds: MutableList ) { if (monitorIds.toSet().size != monitorIds.size) { - listener.onFailure(Exception("duplicate is not allowed")) + throw AlertingException.wrap(IllegalArgumentException("Duplicate delegates not allowed")) } } private fun validateDelegateMonitorsExist( monitorIds: List, - delegateMonitors: List, - actionListener: ActionListener + delegateMonitors: List ) { val reqMonitorIds: MutableList = monitorIds as MutableList delegateMonitors.forEach { reqMonitorIds.remove(it.id) } if (reqMonitorIds.isNotEmpty()) { - actionListener.onFailure(Exception("${reqMonitorIds.joinToString { "," }} are not valid monitor ids")) + throw AlertingException.wrap(IllegalArgumentException(("${reqMonitorIds.joinToString()} are not valid monitor ids"))) } } private suspend fun getDelegateMonitors( - monitorIds: MutableList, - actionListener: ActionListener + monitorIds: MutableList ): List { val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitorIds)) val searchSource = SearchSourceBuilder().query(query) @@ -527,20 +537,15 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( if (response.isTimedOut) { 
return monitors } - try { - for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) - monitors.add(monitor as Monitor) - } + for (hit in response.hits) { + XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.sourceAsString + ).use { hitsParser -> + val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) + monitors.add(monitor as Monitor) } - return monitors - } catch (e: Exception) { - actionListener.onFailure(e) - return listOf() } + return monitors } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt index a09d1cc99..0fc989800 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt @@ -2,7 +2,6 @@ package org.opensearch.alerting.workflow import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.model.MonitorRunResult -import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.Workflow import java.time.Instant diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt index f99d9a802..6379f2f55 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt @@ -180,7 +180,6 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompo } override fun postDelete(jobId: String) { - } override fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: 
Instant) { @@ -195,7 +194,6 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompo suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): MonitorRunResult<*> { val workflow = job as Workflow return CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun) - } // TODO: See if we can move below methods (or few of these) to a common utils diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt index 5afc1b7a5..019d99407 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt @@ -203,7 +203,6 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val finding = Finding.parse(xcp) findings1.add(finding) } - logger.error("sashank: response: {}", finalQueryResponse) val indexToRelatedDocIdsMap = mutableMapOf>() for (finding in findings1) { val ids = indexToRelatedDocIdsMap.getOrDefault(index, mutableListOf()) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index a4e3eb347..4e828b12c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -39,8 +39,11 @@ import org.opensearch.commons.alerting.model.ActionExecutionResult import org.opensearch.commons.alerting.model.AggregationResultBucket import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.ChainedFindings import org.opensearch.commons.alerting.model.ClusterMetricsInput +import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.DataSources +import 
org.opensearch.commons.alerting.model.Delegate import org.opensearch.commons.alerting.model.DocLevelMonitorInput import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.commons.alerting.model.DocumentLevelTrigger @@ -51,7 +54,10 @@ import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger import org.opensearch.commons.alerting.model.Schedule import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Sequence import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.Workflow.WorkflowType import org.opensearch.commons.alerting.model.action.Action import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy import org.opensearch.commons.alerting.model.action.ActionExecutionScope @@ -84,7 +90,7 @@ fun randomQueryLevelMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -102,7 +108,7 @@ fun randomQueryLevelMonitorWithoutUser( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -126,7 +132,7 @@ fun randomBucketLevelMonitor( triggers: List = (1..randomInt(10)).map { randomBucketLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -151,7 +157,7 @@ fun randomBucketLevelMonitor( enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), withMetadata: Boolean = false, - dataSources: DataSources + dataSources: DataSources, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -170,7 +176,7 @@ fun randomClusterMetricsMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR, enabled = enabled, inputs = inputs, @@ -188,7 +194,7 @@ fun randomDocumentLevelMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false + withMetadata: Boolean = false, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -208,7 +214,7 @@ fun randomDocumentLevelMonitor( lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), withMetadata: Boolean = false, dataSources: DataSources, - owner: String? = null + owner: String? = null, ): Monitor { return Monitor( name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, @@ -217,13 +223,63 @@ fun randomDocumentLevelMonitor( ) } +fun randomWorkflowMonitor( + monitorIds: List, + name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), + user: User? = randomUser(), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = randomBoolean(), + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS) +): Workflow { + val delegates = mutableListOf() + if (!monitorIds.isNullOrEmpty()) { + delegates.add(Delegate(1, monitorIds[0])) + for (i in 1 until monitorIds.size) { + delegates.add(Delegate(i + 1, monitorIds [i], ChainedFindings(monitorIds[i - 1]))) + } + } + + return Workflow( + name = name, + enabled = enabled, + schedule = schedule, + lastUpdateTime = lastUpdateTime, + enabledTime = enabledTime, + workflowType = WorkflowType.COMPOSITE, + user = user, + inputs = listOf(CompositeInput(Sequence(delegates))) + ) +} + +fun randomWorkflowMonitorWithDelegates( + delegates: List, + name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), + user: User? 
= randomUser(), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = randomBoolean(), + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), +): Workflow { + return Workflow( + name = name, + enabled = enabled, + schedule = schedule, + lastUpdateTime = lastUpdateTime, + enabledTime = enabledTime, + workflowType = WorkflowType.COMPOSITE, + user = user, + inputs = listOf(CompositeInput(Sequence(delegates))) + ) +} + fun randomQueryLevelTrigger( id: String = UUIDs.base64UUID(), name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), severity: String = "1", condition: Script = randomScript(), actions: List = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): QueryLevelTrigger { return QueryLevelTrigger( id = id, @@ -240,7 +296,7 @@ fun randomBucketLevelTrigger( severity: String = "1", bucketSelector: BucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder(name = id), actions: List = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): BucketLevelTrigger { return BucketLevelTrigger( id = id, @@ -260,7 +316,7 @@ fun randomDocumentLevelTrigger( severity: String = "1", condition: Script = randomScript(), actions: List = mutableListOf(), - destinationId: String = "" + destinationId: String = "", ): DocumentLevelTrigger { return DocumentLevelTrigger( id = id, @@ -278,14 +334,14 @@ fun randomBucketSelectorExtAggregationBuilder( bucketsPathsMap: MutableMap = mutableMapOf("avg" to "10"), script: Script = randomBucketSelectorScript(params = bucketsPathsMap), parentBucketPath: String = "testPath", - filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")) + filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")), ): BucketSelectorExtAggregationBuilder { return 
BucketSelectorExtAggregationBuilder(name, bucketsPathsMap, script, parentBucketPath, filter) } fun randomBucketSelectorScript( idOrCode: String = "params.avg >= 0", - params: Map = mutableMapOf("avg" to "10") + params: Map = mutableMapOf("avg" to "10"), ): Script { return Script(Script.DEFAULT_SCRIPT_TYPE, Script.DEFAULT_SCRIPT_LANG, idOrCode, emptyMap(), params) } @@ -298,7 +354,7 @@ fun randomEmailAccount( port: Int = randomIntBetween(1, 100), method: EmailAccount.MethodType = randomEmailAccountMethod(), username: SecureString? = null, - password: SecureString? = null + password: SecureString? = null, ): EmailAccount { return EmailAccount( name = name, @@ -316,7 +372,7 @@ fun randomEmailGroup( name: String = salt + OpenSearchRestTestCase.randomAlphaOfLength(10), emails: List = (1..randomInt(10)).map { EmailEntry(email = salt + OpenSearchRestTestCase.randomAlphaOfLength(5) + "@email.com") - } + }, ): EmailGroup { return EmailGroup(name = name, emails = emails) } @@ -342,7 +398,7 @@ val TERM_DLS_QUERY = """{\"term\": { \"accessible\": true}}""" fun randomTemplateScript( source: String, - params: Map = emptyMap() + params: Map = emptyMap(), ): Script = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, source, params) fun randomAction( @@ -350,7 +406,7 @@ fun randomAction( template: Script = randomTemplateScript("Hello World"), destinationId: String = "", throttleEnabled: Boolean = false, - throttle: Throttle = randomThrottle() + throttle: Throttle = randomThrottle(), ) = Action(name, destinationId, template, template, throttleEnabled, throttle, actionExecutionPolicy = null) fun randomActionWithPolicy( @@ -359,7 +415,7 @@ fun randomActionWithPolicy( destinationId: String = "", throttleEnabled: Boolean = false, throttle: Throttle = randomThrottle(), - actionExecutionPolicy: ActionExecutionPolicy? = randomActionExecutionPolicy() + actionExecutionPolicy: ActionExecutionPolicy? 
= randomActionExecutionPolicy(), ): Action { return if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { // Return null for throttle when using PerExecutionActionScope since throttling is currently not supported for it @@ -371,11 +427,11 @@ fun randomActionWithPolicy( fun randomThrottle( value: Int = randomIntBetween(60, 120), - unit: ChronoUnit = ChronoUnit.MINUTES + unit: ChronoUnit = ChronoUnit.MINUTES, ) = Throttle(value, unit) fun randomActionExecutionPolicy( - actionExecutionScope: ActionExecutionScope = randomActionExecutionScope() + actionExecutionScope: ActionExecutionScope = randomActionExecutionScope(), ) = ActionExecutionPolicy(actionExecutionScope) fun randomActionExecutionScope(): ActionExecutionScope { @@ -400,7 +456,7 @@ fun randomDocLevelQuery( id: String = OpenSearchRestTestCase.randomAlphaOfLength(10), query: String = OpenSearchRestTestCase.randomAlphaOfLength(10), name: String = "${randomInt(5)}", - tags: List = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) } + tags: List = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, ): DocLevelQuery { return DocLevelQuery(id = id, query = query, name = name, tags = tags) } @@ -408,7 +464,7 @@ fun randomDocLevelQuery( fun randomDocLevelMonitorInput( description: String = OpenSearchRestTestCase.randomAlphaOfLength(randomInt(10)), indices: List = listOf(1..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, - queries: List = listOf(1..randomInt(10)).map { randomDocLevelQuery() } + queries: List = listOf(1..randomInt(10)).map { randomDocLevelQuery() }, ): DocLevelMonitorInput { return DocLevelMonitorInput(description = description, indices = indices, queries = queries) } @@ -420,7 +476,7 @@ fun randomFinding( monitorName: String = OpenSearchRestTestCase.randomAlphaOfLength(10), index: String = OpenSearchRestTestCase.randomAlphaOfLength(10), docLevelQueries: List = 
listOf(randomDocLevelQuery()), - timestamp: Instant = Instant.now() + timestamp: Instant = Instant.now(), ): Finding { return Finding( id = id, @@ -456,7 +512,7 @@ fun randomEmailAccountMethod(): EmailAccount.MethodType { fun randomActionExecutionResult( actionId: String = UUIDs.base64UUID(), lastExecutionTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - throttledCount: Int = randomInt() + throttledCount: Int = randomInt(), ) = ActionExecutionResult(actionId, lastExecutionTime, throttledCount) fun randomQueryLevelMonitorRunResult(): MonitorRunResult { @@ -518,7 +574,7 @@ fun randomQueryLevelTriggerRunResult(): QueryLevelTriggerRunResult { fun randomClusterMetricsInput( path: String = ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath, pathParams: String = "", - url: String = "" + url: String = "", ): ClusterMetricsInput { return ClusterMetricsInput(path, pathParams, url) } @@ -617,7 +673,7 @@ fun RestClient.makeRequest( endpoint: String, params: Map = emptyMap(), entity: HttpEntity? = null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) // TODO: remove PERMISSIVE option after moving system index access to REST API call @@ -642,7 +698,7 @@ fun RestClient.makeRequest( method: String, endpoint: String, entity: HttpEntity? 
= null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) val options = RequestOptions.DEFAULT.toBuilder() @@ -686,3 +742,7 @@ fun assertUserNull(map: Map) { fun assertUserNull(monitor: Monitor) { assertNull("User is not null", monitor.user) } + +fun assertUserNull(workflow: Workflow) { + assertNull("User is not null", workflow.user) +} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt new file mode 100644 index 000000000..239eba265 --- /dev/null +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -0,0 +1,327 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.commons.alerting.model.ChainedFindings +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.rest.RestRequest +import java.util.Collections + +class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { + + fun `test create workflow success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), 
+ dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + val workflowById = searchWorkflow(workflowResponse.id)!! 
+ assertNotNull(workflowById) + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", workflow.inputs, workflowById.inputs) + + // Delegate verification + val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 2, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedFindings!!.monitorId + ) + } + + fun `test update workflow success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + 
dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + var workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + val monitor3 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + val monitorResponse3 = createMonitor(monitor3)!! + + val updatedWorkflowResponse = upsertWorkflow( + randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id, monitorResponse3.id) + ), + workflowResponse.id, + RestRequest.Method.PUT + )!! + + assertNotNull("Workflow creation failed", updatedWorkflowResponse) + assertNotNull(updatedWorkflowResponse.workflow) + assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id) + assertTrue("incorrect version", updatedWorkflowResponse.version > 0) + + workflowById = searchWorkflow(updatedWorkflowResponse.id)!! 
+ + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) + + // Delegate verification + val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 3, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedFindings!!.monitorId + ) + + val delegate3 = delegates[2] + assertNotNull(delegate3) + assertEquals("Delegate3 order not correct", 3, delegate3.order) + assertEquals("Delegate3 id not correct", monitorResponse3.id, delegate3.monitorId) + assertEquals( + "Delegate3 Chained finding not correct", monitorResponse2.id, delegate3.chainedFindings!!.monitorId + ) + } + + fun `test create workflow without delegate failure`() { + val workflow = randomWorkflowMonitor( + monitorIds = Collections.emptyList() + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Delegates list can not be empty.") + ) + } + } + } + + fun `test create workflow duplicate delegate failure`() { + val workflow = 
randomWorkflowMonitor( + monitorIds = listOf("1", "1", "2") + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Duplicate delegates not allowed") + ) + } + } + } + + fun `test create workflow delegate monitor doesn't exist failure`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + val monitorResponse = createMonitor(monitor)!! 
+ + val workflow = randomWorkflowMonitor( + monitorIds = listOf("-1", monitorResponse.id) + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("are not valid monitor ids") + ) + } + } + } + + fun `test create workflow sequence order not correct failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3") + ) + val workflow = randomWorkflowMonitorWithDelegates( + delegates = delegates + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + ) + } + } + } + + fun `test create workflow chained findings monitor not in sequence failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedFindings("monitor-x")) + ) + val workflow = randomWorkflowMonitorWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + ) + } + } + } + + fun `test create workflow chained findings order not correct failure`() { + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedFindings("monitor-2")) + ) + val workflow = randomWorkflowMonitorWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor 
monitor-3") + ) + } + } + } +} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt new file mode 100644 index 000000000..3c1eeee79 --- /dev/null +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.transport + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope +import org.opensearch.action.support.WriteRequest +import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.IndexWorkflowRequest +import org.opensearch.commons.alerting.action.IndexWorkflowResponse +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.index.query.TermQueryBuilder +import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.rest.RestRequest +import org.opensearch.search.builder.SearchSourceBuilder +/** + * A test that keep a singleton node started for all tests that can be used to get + * references to Guice injectors in unit tests. + */ + +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) +abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { + + protected fun searchWorkflow(id: String, indices: String = ScheduledJob.SCHEDULED_JOBS_INDEX, refresh: Boolean = true): Workflow? 
{ + try { + if (refresh) refreshIndex(indices) + } catch (e: Exception) { + logger.warn("Could not refresh index $indices because: ${e.message}") + return null + } + val ssb = SearchSourceBuilder() + ssb.version(true) + ssb.query(TermQueryBuilder("_id", id)) + val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() + + return searchResponse.hits.hits.map { it -> + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + Workflow.parse(xcp, it.id, it.version) + }.first() + } + + protected fun upsertWorkflow(workflow: Workflow, id: String = Workflow.NO_ID, method: RestRequest.Method = RestRequest.Method.POST): IndexWorkflowResponse? { + val request = IndexWorkflowRequest( + workflowId = id, + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), + method = method, + workflow = workflow + ) + + return client().execute(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, request).actionGet() + } +} diff --git a/core/build.gradle b/core/build.gradle index ce258112b..f4432bb06 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -15,7 +15,7 @@ dependencies { implementation "com.cronutils:cron-utils:9.1.6" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation 'com.google.googlejavaformat:google-java-format:1.10.0' - api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar") + api files("/home/stevan/git/opensearch/repo/common-utils/build/libs/common-utils-2.5.0.0-SNAPSHOT.jar") implementation 'commons-validator:commons-validator:1.7' testImplementation "org.opensearch.test:framework:${opensearch_version}" testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" From e0af3059598db04a8c6a94eb473730e5b76d0e5d Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Tue, 31 Jan 2023 18:58:51 +0100 Subject: [PATCH 02/18] 
Added transport layer for getting and deleting the workflow Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 6 +- .../TransportDeleteWorkflowAction.kt | 145 ++++++++++++++ .../transport/TransportGetWorkflowAction.kt | 41 ++-- .../opensearch/alerting/WorkflowMonitorIT.kt | 183 +++++++++++++++++- .../transport/WorkflowSingleNodeTestCase.kt | 12 ++ .../resources/mappings/scheduled-jobs.json | 39 ++-- 6 files changed, 395 insertions(+), 31 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 61439ea70..c4151527e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -44,6 +44,7 @@ import org.opensearch.alerting.settings.LegacyOpenDistroAlertingSettings import org.opensearch.alerting.settings.LegacyOpenDistroDestinationSettings import org.opensearch.alerting.transport.TransportAcknowledgeAlertAction import org.opensearch.alerting.transport.TransportDeleteMonitorAction +import org.opensearch.alerting.transport.TransportDeleteWorkflowAction import org.opensearch.alerting.transport.TransportExecuteMonitorAction import org.opensearch.alerting.transport.TransportGetAlertsAction import org.opensearch.alerting.transport.TransportGetDestinationsAction @@ -51,6 +52,7 @@ import org.opensearch.alerting.transport.TransportGetEmailAccountAction import org.opensearch.alerting.transport.TransportGetEmailGroupAction import org.opensearch.alerting.transport.TransportGetFindingsSearchAction import org.opensearch.alerting.transport.TransportGetMonitorAction +import org.opensearch.alerting.transport.TransportGetWorkflowAction import org.opensearch.alerting.transport.TransportIndexCompositeWorkflowAction import 
org.opensearch.alerting.transport.TransportIndexMonitorAction import org.opensearch.alerting.transport.TransportSearchEmailAccountAction @@ -183,7 +185,9 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexCompositeWorkflowAction::class.java) + ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexCompositeWorkflowAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ACTION_TYPE, TransportGetWorkflowAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java) ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt new file mode 100644 index 000000000..e0c011c45 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -0,0 +1,145 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.transport + +import kotlinx.coroutines.CoroutineName +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.GlobalScope +import kotlinx.coroutines.launch +import org.opensearch.OpenSearchStatusException +import org.opensearch.action.ActionListener +import org.opensearch.action.ActionRequest +import org.opensearch.action.delete.DeleteRequest +import org.opensearch.action.delete.DeleteResponse +import org.opensearch.action.get.GetRequest 
+import org.opensearch.action.get.GetResponse +import org.opensearch.action.support.ActionFilters +import org.opensearch.action.support.HandledTransportAction +import org.opensearch.alerting.opensearchapi.suspendUntil +import org.opensearch.alerting.settings.AlertingSettings +import org.opensearch.alerting.util.AlertingException +import org.opensearch.client.Client +import org.opensearch.cluster.service.ClusterService +import org.opensearch.common.inject.Inject +import org.opensearch.common.settings.Settings +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest +import org.opensearch.commons.alerting.action.DeleteWorkflowResponse +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.authuser.User +import org.opensearch.commons.utils.recreateObject +import org.opensearch.rest.RestStatus +import org.opensearch.tasks.Task +import org.opensearch.transport.TransportService + +class TransportDeleteWorkflowAction @Inject constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry +) : HandledTransportAction( + AlertingActions.DELETE_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::DeleteWorkflowRequest +), + SecureTransportAction { + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) + } + + override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { + val transformedRequest = request as? 
DeleteWorkflowRequest + ?: recreateObject(request) { DeleteWorkflowRequest(it) } + + val user = readUserFromThreadContext(client) + val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) + .setRefreshPolicy(transformedRequest.refreshPolicy) + + if (!validateUserBackendRoles(user, actionListener)) { + return + } + + GlobalScope.launch(Dispatchers.IO + CoroutineName("DeleteWorkflowAction")) { + DeleteWorkflowHandler(client, actionListener, deleteRequest, user, transformedRequest.workflowId).resolveUserAndStart() + } + } + + inner class DeleteWorkflowHandler( + private val client: Client, + private val actionListener: ActionListener, + private val deleteRequest: DeleteRequest, + private val user: User?, + private val workflowId: String + ) { + suspend fun resolveUserAndStart() { + try { + val workflow = getWorkflow() + + val canDelete = user == null || + !doFilterForUser(user) || + checkUserPermissionsWithResource( + user, + workflow.user, + actionListener, + "workflow", + workflowId + ) + + if (canDelete) { + val deleteResponse = deleteWorkflow(workflow) + // TODO - uncomment once the workflow metadata is added + // deleteMetadata(workflow) + actionListener.onResponse(DeleteWorkflowResponse(deleteResponse.id, deleteResponse.version)) + } else { + actionListener.onFailure( + AlertingException( + "Not allowed to delete this workflow!", + RestStatus.FORBIDDEN, + IllegalStateException() + ) + ) + } + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + } + + private suspend fun getWorkflow(): Workflow { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId) + + val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } + if (getResponse.isExists == false) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Workflow with $workflowId is not found", RestStatus.NOT_FOUND) + ) + ) + } + val xcp = XContentHelper.createParser( + 
xContentRegistry, LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, XContentType.JSON + ) + return ScheduledJob.parse(xcp, getResponse.id, getResponse.version) as Workflow + } + + private suspend fun deleteWorkflow(workflow: Workflow): DeleteResponse { + return client.suspendUntil { delete(deleteRequest, it) } + } + + private suspend fun deleteMetadata(workflow: Workflow) { + val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, "${workflow.id}-metadata") + val deleteResponse: DeleteResponse = client.suspendUntil { delete(deleteRequest, it) } + } + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt index 80c61ad85..fcd3ffd52 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt @@ -1,3 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.transport import org.opensearch.OpenSearchStatusException @@ -6,9 +11,6 @@ import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction -import org.opensearch.alerting.action.GetMonitorAction -import org.opensearch.alerting.action.GetMonitorRequest -import org.opensearch.alerting.action.GetMonitorResponse import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.client.Client @@ -19,8 +21,11 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType -import 
org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetWorkflowRequest +import org.opensearch.commons.alerting.action.GetWorkflowResponse import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -32,8 +37,8 @@ class TransportGetWorkflowAction @Inject constructor( val xContentRegistry: NamedXContentRegistry, val clusterService: ClusterService, settings: Settings -) : HandledTransportAction( - GetMonitorAction.NAME, transportService, actionFilters, ::GetMonitorRequest +) : HandledTransportAction( + AlertingActions.GET_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::GetWorkflowRequest ), SecureTransportAction { @@ -43,12 +48,12 @@ class TransportGetWorkflowAction @Inject constructor( listenFilterBySettingChange(clusterService) } - override fun doExecute(task: Task, getMonitorRequest: GetMonitorRequest, actionListener: ActionListener) { + override fun doExecute(task: Task, getWorkflowRequest: GetWorkflowRequest, actionListener: ActionListener) { val user = readUserFromThreadContext(client) - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getMonitorRequest.monitorId) - .version(getMonitorRequest.version) - .fetchSourceContext(getMonitorRequest.srcContext) + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getWorkflowRequest.workflowId) + .version(getWorkflowRequest.version) + .fetchSourceContext(getWorkflowRequest.srcContext) if (!validateUserBackendRoles(user, actionListener)) { return @@ -69,7 +74,7 @@ class TransportGetWorkflowAction @Inject constructor( actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - "Monitor not found.", + "Workflow not found.", RestStatus.NOT_FOUND ) ) @@ -77,21 +82,21 @@ class 
TransportGetWorkflowAction @Inject constructor( return } - var monitor: Monitor? = null + var workflow: Workflow? = null if (!response.isSourceEmpty) { XContentHelper.createParser( xContentRegistry, LoggingDeprecationHandler.INSTANCE, response.sourceAsBytesRef, XContentType.JSON ).use { xcp -> - monitor = ScheduledJob.parse(xcp, response.id, response.version) as Monitor + workflow = ScheduledJob.parse(xcp, response.id, response.version) as Workflow // security is enabled and filterby is enabled if (!checkUserPermissionsWithResource( user, - monitor?.user, + workflow?.user, actionListener, - "monitor", - getMonitorRequest.monitorId + "workflow", + getWorkflowRequest.workflowId ) ) { return @@ -100,13 +105,13 @@ class TransportGetWorkflowAction @Inject constructor( } actionListener.onResponse( - GetMonitorResponse( + GetWorkflowResponse( response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, - monitor + workflow ) ) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 239eba265..1745e7155 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -5,7 +5,11 @@ package org.opensearch.alerting +import org.opensearch.action.admin.indices.refresh.RefreshRequest +import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest import org.opensearch.commons.alerting.model.ChainedFindings import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.DataSources @@ -88,7 +92,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) } - fun `test update workflow success`() { + fun `test update workflow add 
monitor success`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(docQuery1) @@ -192,6 +196,183 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) } + fun `test update workflow remove monitor success`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + var workflowById = searchWorkflow(workflowResponse.id)!! 
+ assertNotNull(workflowById) + + val updatedWorkflowResponse = upsertWorkflow( + randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id) + ), + workflowResponse.id, + RestRequest.Method.PUT + )!! + + assertNotNull("Workflow creation failed", updatedWorkflowResponse) + assertNotNull(updatedWorkflowResponse.workflow) + assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id) + assertTrue("incorrect version", updatedWorkflowResponse.version > 0) + + workflowById = searchWorkflow(updatedWorkflowResponse.id)!! + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) + + // Delegate verification + val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 1, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) + } + + fun `test get workflow`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + 
queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + + val workflowResponse = upsertWorkflow(workflowRequest)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + assertNotNull(getWorkflowResponse) + + val workflowById = getWorkflowResponse.workflow!! + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, getWorkflowResponse.id) + assertTrue("incorrect version", getWorkflowResponse.version > 0) + assertEquals("Workflow name not correct", workflowRequest.name, workflowById.name) + assertEquals("Workflow owner not correct", workflowRequest.owner, workflowById.owner) + assertEquals("Workflow input not correct", workflowRequest.inputs, workflowById.inputs) + + // Delegate verification + val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 1, delegates.size) + + val delegate = delegates[0] + assertNotNull(delegate) + assertEquals("Delegate order not correct", 1, delegate.order) + assertEquals("Delegate id not correct", monitorResponse.id, delegate.monitorId) + } + + fun `test delete workflow`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = 
"custom_alerts_index" + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + client().execute( + AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, DeleteWorkflowRequest(workflowId, WriteRequest.RefreshPolicy.IMMEDIATE) + ).get() + client().admin().indices().refresh(RefreshRequest(customQueryIndex)).get() + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + fun `test create workflow without delegate failure`() { val workflow = randomWorkflowMonitor( monitorIds = Collections.emptyList() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 3c1eeee79..1ced2e2ae 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -9,6 +9,8 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope import org.opensearch.action.support.WriteRequest import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions +import 
org.opensearch.commons.alerting.action.GetWorkflowRequest +import org.opensearch.commons.alerting.action.GetWorkflowResponse import org.opensearch.commons.alerting.action.IndexWorkflowRequest import org.opensearch.commons.alerting.action.IndexWorkflowResponse import org.opensearch.commons.alerting.model.ScheduledJob @@ -17,6 +19,8 @@ import org.opensearch.index.query.TermQueryBuilder import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.rest.RestRequest import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.search.fetch.subphase.FetchSourceContext + /** * A test that keep a singleton node started for all tests that can be used to get * references to Guice injectors in unit tests. @@ -55,4 +59,12 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { return client().execute(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, request).actionGet() } + + protected fun getWorkflowById( + id: String, + version: Long = 1L, + fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE + ): GetWorkflowResponse { + return client().execute(AlertingActions.GET_WORKFLOW_ACTION_TYPE, GetWorkflowRequest(id, version, RestRequest.Method.GET, fetchSourceContext)).get() + } } diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json index 768f73a9a..29f499ba1 100644 --- a/core/src/main/resources/mappings/scheduled-jobs.json +++ b/core/src/main/resources/mappings/scheduled-jobs.json @@ -404,20 +404,37 @@ "inputs": { "type": "nested", "properties": { - "search": { + "sequence": { "properties": { - "indices": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 + "delegates": { + "type": "nested", + "properties": { + "order": { + "type": "integer" + }, + "monitorId": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "chainedFindings": { + "properties": { + 
"monitorId": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } } } - }, - "query": { - "type": "object", - "enabled": false } } } From feebf0e3f769eba0b5e849813b10caffc95533cc Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Thu, 23 Feb 2023 22:25:59 +0100 Subject: [PATCH 03/18] Updated getting and deleting the workflow in order to check if the monitor index is not initialized yet. Added workflow crud test cases Signed-off-by: Stevan Buzejic --- .../TransportDeleteWorkflowAction.kt | 21 +- .../transport/TransportGetWorkflowAction.kt | 12 +- .../TransportIndexCompositeWorkflowAction.kt | 6 + .../org/opensearch/alerting/TestHelpers.kt | 4 + .../opensearch/alerting/WorkflowMonitorIT.kt | 394 ++++++++++++++++-- .../transport/WorkflowSingleNodeTestCase.kt | 8 + 6 files changed, 400 insertions(+), 45 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt index e0c011c45..f50042052 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -9,6 +9,7 @@ import kotlinx.coroutines.CoroutineName import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.GlobalScope import kotlinx.coroutines.launch +import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException import org.opensearch.action.ActionListener import org.opensearch.action.ActionRequest @@ -18,6 +19,7 @@ import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction +import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.opensearchapi.suspendUntil import 
org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException @@ -36,10 +38,13 @@ import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject +import org.opensearch.index.IndexNotFoundException import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService +private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) + class TransportDeleteWorkflowAction @Inject constructor( transportService: TransportService, val client: Client, @@ -64,7 +69,7 @@ class TransportDeleteWorkflowAction @Inject constructor( val user = readUserFromThreadContext(client) val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) - .setRefreshPolicy(transformedRequest.refreshPolicy) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) if (!validateUserBackendRoles(user, actionListener)) { return @@ -111,7 +116,16 @@ class TransportDeleteWorkflowAction @Inject constructor( ) } } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found.", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } @@ -122,7 +136,7 @@ class TransportDeleteWorkflowAction @Inject constructor( if (getResponse.isExists == false) { actionListener.onFailure( AlertingException.wrap( - OpenSearchStatusException("Workflow with $workflowId is not found", RestStatus.NOT_FOUND) + OpenSearchStatusException("Workflow not found.", RestStatus.NOT_FOUND) ) ) } @@ -134,6 +148,7 @@ class TransportDeleteWorkflowAction @Inject constructor( } private suspend fun deleteWorkflow(workflow: Workflow): DeleteResponse { + log.debug("Deleting the workflow 
with id ${deleteRequest.id()}") return client.suspendUntil { delete(deleteRequest, it) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt index fcd3ffd52..f0802da4d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt @@ -26,6 +26,7 @@ import org.opensearch.commons.alerting.action.GetWorkflowRequest import org.opensearch.commons.alerting.action.GetWorkflowResponse import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.index.IndexNotFoundException import org.opensearch.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -117,7 +118,16 @@ class TransportGetWorkflowAction @Inject constructor( } override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } ) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt index 1ea721b39..7ce4f48ee 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt @@ -463,6 +463,12 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( } suspend fun validateRequest(request: IndexWorkflowRequest) { + if (request.workflow.inputs.isEmpty()) + throw 
AlertingException.wrap(IllegalArgumentException("Input list can not be empty.")) + + if (request.workflow.inputs[0] !is CompositeInput) + throw AlertingException.wrap(IllegalArgumentException("When creating a workflow input must be CompositeInput")) + val compositeInput = request.workflow.inputs[0] as CompositeInput val monitorIds = compositeInput.sequence.delegates.stream().map { it.monitorId }.collect(Collectors.toList()) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index 4e828b12c..f858d7831 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -224,6 +224,7 @@ fun randomDocumentLevelMonitor( } fun randomWorkflowMonitor( + id: String = Workflow.NO_ID, monitorIds: List, name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), user: User? = randomUser(), @@ -241,6 +242,7 @@ fun randomWorkflowMonitor( } return Workflow( + id = id, name = name, enabled = enabled, schedule = schedule, @@ -253,6 +255,7 @@ fun randomWorkflowMonitor( } fun randomWorkflowMonitorWithDelegates( + id: String = Workflow.NO_ID, delegates: List, name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), user: User? 
= randomUser(), @@ -262,6 +265,7 @@ fun randomWorkflowMonitorWithDelegates( lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), ): Workflow { return Workflow( + id = id, name = name, enabled = enabled, schedule = schedule, diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 1745e7155..d5def95dc 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -5,11 +5,7 @@ package org.opensearch.alerting -import org.opensearch.action.admin.indices.refresh.RefreshRequest -import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase -import org.opensearch.commons.alerting.action.AlertingActions -import org.opensearch.commons.alerting.action.DeleteWorkflowRequest import org.opensearch.commons.alerting.model.ChainedFindings import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.DataSources @@ -75,6 +71,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Workflow input not correct", workflow.inputs, workflowById.inputs) // Delegate verification + @Suppress("UNCHECKED_CAST") val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } assertEquals("Delegates size not correct", 2, delegates.size) @@ -171,6 +168,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) // Delegate verification + @Suppress("UNCHECKED_CAST") val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } assertEquals("Delegates size not correct", 3, delegates.size) @@ -264,6 +262,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Workflow input not 
correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) // Delegate verification + @Suppress("UNCHECKED_CAST") val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } assertEquals("Delegates size not correct", 1, delegates.size) @@ -274,22 +273,13 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } fun `test get workflow`() { - val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val customFindingsIndex = "custom_findings_index" - val customFindingsIndexPattern = "custom_findings_index-1" - val customQueryIndex = "custom_alerts_index" val monitor = randomDocumentLevelMonitor( inputs = listOf(docLevelInput), triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern - ) ) val monitorResponse = createMonitor(monitor)!! 
@@ -316,6 +306,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Workflow input not correct", workflowRequest.inputs, workflowById.inputs) // Delegate verification + @Suppress("UNCHECKED_CAST") val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } assertEquals("Delegates size not correct", 1, delegates.size) @@ -325,23 +316,52 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Delegate id not correct", monitorResponse.id, delegate.monitorId) } - fun `test delete workflow`() { - val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + fun `test get workflow for invalid id monitor index doesn't exist`() { + // Get workflow for non existing workflow id + try { + getWorkflowById(id = "-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found") + ) + } + } + } + + fun `test get workflow for invalid id monitor index exists`() { val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val customFindingsIndex = "custom_findings_index" - val customFindingsIndexPattern = "custom_findings_index-1" - val customQueryIndex = "custom_alerts_index" val monitor = randomDocumentLevelMonitor( inputs = listOf(docLevelInput), triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern - ) + ) + createMonitor(monitor) + // Get workflow for non existing workflow id + try { + getWorkflowById(id = "-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found") + ) + } + } + } + + fun 
`test delete workflow`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) ) val monitorResponse = createMonitor(monitor)!! @@ -356,17 +376,89 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(getWorkflowResponse) assertEquals(workflowId, getWorkflowResponse.id) - client().execute( - AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, DeleteWorkflowRequest(workflowId, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() - client().admin().indices().refresh(RefreshRequest(customQueryIndex)).get() + deleteWorkflow(workflowId) // Verify that the workflow is deleted try { getWorkflowById(workflowId) } catch (e: Exception) { e.message?.let { assertTrue( - "Exception not returning IndexWorkflow Action error ", + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + + fun `test delete monitor that is part of workflow sequence`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! 
+ val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + deleteWorkflow(workflowId) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + + fun `test delete workflow for invalid id monitor index doesn't exists`() { + // Try deleting non-existing workflow + try { + deleteWorkflow("-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning DeleteWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + } + + fun `test delete workflow for invalid id monitor index exists`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) + createMonitor(monitor) + // Try deleting non-existing workflow + try { + deleteWorkflow("-1") + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning DeleteWorkflow Action error ", it.contains("Workflow not found.") ) } @@ -389,6 +481,47 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test update workflow without delegate failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + 
triggers = listOf(trigger), + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflowMonitor( + id = workflowResponse.id, + monitorIds = Collections.emptyList() + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Delegates list can not be empty.") + ) + } + } + } + fun `test create workflow duplicate delegate failure`() { val workflow = randomWorkflowMonitor( monitorIds = listOf("1", "1", "2") @@ -405,24 +538,50 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test update workflow duplicate delegate failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflowMonitor( + id = workflowResponse.id, + monitorIds = listOf("1", "1", "2") + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Duplicate delegates not allowed") + ) + } + } + } + fun `test create workflow delegate monitor doesn't exist failure`() { - val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val customFindingsIndex = "custom_findings_index" - val customFindingsIndexPattern = "custom_findings_index-1" - val customQueryIndex = "custom_alerts_index" val monitor = randomDocumentLevelMonitor( inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern - ) + triggers = listOf(trigger) ) val monitorResponse = createMonitor(monitor)!! @@ -441,6 +600,41 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test update workflow delegate monitor doesn't exist failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + workflow = randomWorkflowMonitor( + id = workflowResponse.id, + monitorIds = listOf("-1", monitorResponse.id) + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("are not valid monitor ids") + ) + } + } + } + fun `test create workflow sequence order not correct failure`() { val delegates = listOf( Delegate(1, "monitor-1"), @@ -462,6 +656,45 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test update workflow sequence order not correct failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3") + ) + workflow = randomWorkflowMonitorWithDelegates( + id = workflowResponse.id, + delegates = delegates + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + ) + } + } + } + fun `test create workflow chained findings monitor not in sequence failure`() { val delegates = listOf( Delegate(1, "monitor-1"), @@ -484,6 +717,46 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test update workflow chained findings monitor not in sequence failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedFindings("monitor-x")) + ) + workflow = randomWorkflowMonitorWithDelegates( + id = workflowResponse.id, + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + ) + } + } + } + fun `test create workflow chained findings order not correct failure`() { val delegates = listOf( Delegate(1, "monitor-1"), @@ -505,4 +778,43 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } } + + fun `test update workflow chained findings order not correct failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val monitorResponse = createMonitor(monitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + val delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedFindings("monitor-2")) + ) + workflow = randomWorkflowMonitorWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + ) + } + } + } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 1ced2e2ae..626df7a9f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope import org.opensearch.action.support.WriteRequest import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest import org.opensearch.commons.alerting.action.GetWorkflowRequest import org.opensearch.commons.alerting.action.GetWorkflowResponse import org.opensearch.commons.alerting.action.IndexWorkflowRequest @@ -67,4 +68,11 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { ): GetWorkflowResponse { return client().execute(AlertingActions.GET_WORKFLOW_ACTION_TYPE, GetWorkflowRequest(id, version, RestRequest.Method.GET, fetchSourceContext)).get() } + + protected fun deleteWorkflow(workflowId: String) { + client().execute( + AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, + DeleteWorkflowRequest(workflowId, WriteRequest.RefreshPolicy.IMMEDIATE) + ).get() + } 
} From 22eb90030ca9fc6e6a477c15c1236a299d5fdd13 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 27 Feb 2023 16:24:06 +0100 Subject: [PATCH 04/18] When deleting the monitor, added a check if the monitor is part of the workflow Signed-off-by: Stevan Buzejic --- .../transport/TransportDeleteMonitorAction.kt | 40 +++++++++++++++++-- .../opensearch/alerting/WorkflowMonitorIT.kt | 11 ++--- .../transport/AlertingSingleNodeTestCase.kt | 6 +++ .../transport/WorkflowSingleNodeTestCase.kt | 10 ++++- .../resources/mappings/scheduled-jobs.json | 39 ++++++++++-------- 5 files changed, 80 insertions(+), 26 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt index ab57a0d45..89c0133c7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt @@ -10,6 +10,7 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.GlobalScope import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager +import org.apache.lucene.search.join.ScoreMode import org.opensearch.OpenSearchStatusException import org.opensearch.action.ActionListener import org.opensearch.action.ActionRequest @@ -17,6 +18,8 @@ import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse +import org.opensearch.action.search.SearchRequest +import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction import org.opensearch.alerting.opensearchapi.suspendUntil @@ -35,6 +38,7 @@ import org.opensearch.commons.alerting.action.DeleteMonitorRequest import 
org.opensearch.commons.alerting.action.DeleteMonitorResponse import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject import org.opensearch.index.query.QueryBuilders @@ -42,6 +46,7 @@ import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.DeleteByQueryAction import org.opensearch.index.reindex.DeleteByQueryRequestBuilder import org.opensearch.rest.RestStatus +import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import kotlin.coroutines.resume @@ -95,9 +100,10 @@ class TransportDeleteMonitorAction @Inject constructor( try { val monitor = getMonitor() - val canDelete = user == null || - !doFilterForUser(user) || - checkUserPermissionsWithResource(user, monitor.user, actionListener, "monitor", monitorId) + val canDelete = monitorIsNotInWorkflows(monitor.id) && ( + user == null || !doFilterForUser(user) || + checkUserPermissionsWithResource(user, monitor.user, actionListener, "monitor", monitorId) + ) if (canDelete) { val deleteResponse = deleteMonitor(monitor) @@ -114,6 +120,34 @@ class TransportDeleteMonitorAction @Inject constructor( } } + /** + * Checks if the monitor is part of the workflow + * + * @param monitorId - id of monitor that is checked if it is a workflow delegate + */ + private suspend fun monitorIsNotInWorkflows(monitorId: String): Boolean { + val queryBuilder = QueryBuilders.nestedQuery( + Workflow.WORKFLOW_DELEGATE_PATH, + QueryBuilders.boolQuery().must( + QueryBuilders.matchQuery( + Workflow.WORKFLOW_MONITOR_PATH, + monitorId + ) + ), + ScoreMode.None + ) + + val searchRequest = SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder).fetchSource(true)) + + val 
searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + if (searchResponse.hits.totalHits?.value == 0L) { + return true + } + return false + } + private suspend fun getMonitor(): Monitor { val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index d5def95dc..56aab4537 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -406,6 +406,8 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val workflowRequest = randomWorkflowMonitor( monitorIds = listOf(monitorResponse.id) ) + + (workflowRequest.inputs.get(0) as CompositeInput).sequence.delegates.get(0).monitorId val workflowResponse = upsertWorkflow(workflowRequest)!! val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -413,15 +415,14 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(getWorkflowResponse) assertEquals(workflowId, getWorkflowResponse.id) - deleteWorkflow(workflowId) - // Verify that the workflow is deleted + // Verify that the monitor can't be deleted because it's included in the workflow try { - getWorkflowById(workflowId) + deleteMonitor(monitorResponse.id) } catch (e: Exception) { e.message?.let { assertTrue( - "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + "Exception not returning DeleteMonitor Action error ", + it.contains("Not allowed to delete this monitor!") ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 61e788a32..83ac60912 100644 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -23,6 +23,8 @@ import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse import org.opensearch.commons.alerting.action.GetFindingsRequest import org.opensearch.commons.alerting.action.GetFindingsResponse import org.opensearch.commons.alerting.action.IndexMonitorRequest @@ -178,6 +180,10 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext) ).get() + protected fun deleteMonitor(monitorId: String): DeleteMonitorResponse = client().execute( + AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE) + ).get() + override fun getPlugins(): List> { return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 626df7a9f..d9f36f721 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -7,6 +7,7 @@ package org.opensearch.alerting.transport import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope import org.opensearch.action.support.WriteRequest +import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.json.JsonXContent import 
org.opensearch.commons.alerting.action.AlertingActions import org.opensearch.commons.alerting.action.DeleteWorkflowRequest @@ -44,7 +45,14 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { return searchResponse.hits.hits.map { it -> val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } - Workflow.parse(xcp, it.id, it.version) + lateinit var workflow: Workflow + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow" -> workflow = Workflow.parse(xcp) + } + } + workflow.copy(id = it.id, version = it.version) }.first() } diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json index 29f499ba1..3a94f86ac 100644 --- a/core/src/main/resources/mappings/scheduled-jobs.json +++ b/core/src/main/resources/mappings/scheduled-jobs.json @@ -404,26 +404,18 @@ "inputs": { "type": "nested", "properties": { - "sequence": { + "composite_input": { + "type": "nested", "properties": { - "delegates": { - "type": "nested", + "sequence": { "properties": { - "order": { - "type": "integer" - }, - "monitorId": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "chainedFindings": { + "delegates": { + "type": "nested", "properties": { - "monitorId": { + "order": { + "type": "integer" + }, + "monitor_id": { "type": "text", "fields": { "keyword": { @@ -431,6 +423,19 @@ "ignore_above": 256 } } + }, + "chained_findings": { + "properties": { + "monitor_id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } } } } From d6269ab9a01dee18ddeee1d5442f2005b014a5af Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Feb 2023 09:52:35 +0100 Subject: [PATCH 05/18] Added transport workflow execution layer. 
Adjusted monitor runners to consider the workflow execution id Added worfklow service used for retrieving monitors and their findings. Added business logic for considering the chained monitors Signed-off-by: Stevan Buzejic --- alerting/build.gradle | 2 +- .../org/opensearch/alerting/AlertingPlugin.kt | 22 +- .../alerting/BucketLevelMonitorRunner.kt | 38 ++- .../alerting/DocumentLevelMonitorRunner.kt | 49 ++- .../org/opensearch/alerting/InputService.kt | 28 +- .../org/opensearch/alerting/MonitorRunner.kt | 4 +- .../alerting/MonitorRunnerExecutionContext.kt | 1 + .../alerting/QueryLevelMonitorRunner.kt | 6 +- .../opensearch/alerting/WorkflowService.kt | 177 +++++++++++ .../alerting/action/ExecuteWorkflowAction.kt | 15 + .../alerting/action/ExecuteWorkflowRequest.kt | 60 ++++ .../action/ExecuteWorkflowResponse.kt | 44 +++ .../alerting/model/WorkflowMetadata.kt | 11 + .../TransportExecuteWorkflowAction.kt | 120 ++++++++ .../transport/TransportSearchMonitorAction.kt | 6 +- .../workflow/CompositeWorkflowRunner.kt | 86 +++++- .../alerting/workflow/WorkflowRunContext.kt | 12 + .../alerting/workflow/WorkflowRunner.kt | 7 +- .../workflow/WorkflowRunnerService.kt | 8 +- .../alerting/alerts/finding_mapping.json | 3 + .../opensearch/alerting/WorkflowRunnerIT.kt | 282 ++++++++++++++++++ .../transport/AlertingSingleNodeTestCase.kt | 25 ++ .../transport/WorkflowSingleNodeTestCase.kt | 10 + build.gradle | 2 +- 24 files changed, 975 insertions(+), 43 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt create mode 100644 
alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt create mode 100644 alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt diff --git a/alerting/build.gradle b/alerting/build.gradle index d2bbb8bc1..f0499d421 100644 --- a/alerting/build.gradle +++ b/alerting/build.gradle @@ -259,7 +259,7 @@ String bwcRemoteFile = 'https://ci.opensearch.org/ci/dbc/bundle-build/1.1.0/2021 testClusters { "${baseName}$i" { testDistribution = "ARCHIVE" - versions = ["1.1.0", "2.4.0-SNAPSHOT"] + versions = ["1.1.0", "2.5.0-SNAPSHOT"] numberOfNodes = 3 plugin(provider(new Callable(){ @Override diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index c4151527e..0b8e121a7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -8,6 +8,7 @@ package org.opensearch.alerting import org.opensearch.action.ActionRequest import org.opensearch.action.ActionResponse import org.opensearch.alerting.action.ExecuteMonitorAction +import org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.GetDestinationsAction import org.opensearch.alerting.action.GetEmailAccountAction import org.opensearch.alerting.action.GetEmailGroupAction @@ -46,6 +47,7 @@ import org.opensearch.alerting.transport.TransportAcknowledgeAlertAction import org.opensearch.alerting.transport.TransportDeleteMonitorAction import org.opensearch.alerting.transport.TransportDeleteWorkflowAction import org.opensearch.alerting.transport.TransportExecuteMonitorAction +import org.opensearch.alerting.transport.TransportExecuteWorkflowAction import org.opensearch.alerting.transport.TransportGetAlertsAction import 
org.opensearch.alerting.transport.TransportGetDestinationsAction import org.opensearch.alerting.transport.TransportGetEmailAccountAction @@ -60,6 +62,7 @@ import org.opensearch.alerting.transport.TransportSearchEmailGroupAction import org.opensearch.alerting.transport.TransportSearchMonitorAction import org.opensearch.alerting.util.DocLevelMonitorQueries import org.opensearch.alerting.util.destinationmigration.DestinationMigrationCoordinator +import org.opensearch.alerting.workflow.WorkflowRunnerService import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.node.DiscoveryNodes @@ -133,6 +136,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R } lateinit var runner: MonitorRunnerService + lateinit var workflowRunner: WorkflowRunnerService lateinit var scheduler: JobScheduler lateinit var sweeper: JobSweeper lateinit var scheduledJobIndices: ScheduledJobIndices @@ -187,7 +191,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexCompositeWorkflowAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ACTION_TYPE, TransportGetWorkflowAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java) + ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java), + ActionPlugin.ActionHandler(ExecuteWorkflowAction.INSTANCE, TransportExecuteWorkflowAction::class.java) ) } @@ -234,6 +239,21 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) .registerConsumers() 
.registerDestinationSettings() + workflowRunner = WorkflowRunnerService + .registerClusterService(clusterService) + .registerClient(client) + .registerNamedXContentRegistry(xContentRegistry) + .registerScriptService(scriptService) + .registerSettings(settings) + .registerThreadPool(threadPool) + .registerAlertIndices(alertIndices) + .registerInputService(InputService(client, scriptService, namedWriteableRegistry, xContentRegistry)) + .registerTriggerService(TriggerService(scriptService)) + .registerAlertService(AlertService(client, xContentRegistry, alertIndices)) + .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) + .registerWorkflowService(WorkflowService(client, xContentRegistry)) + .registerConsumers() + .registerDestinationSettings() scheduledJobIndices = ScheduledJobIndices(client.admin(), clusterService) docLevelMonitorQueries = DocLevelMonitorQueries(client, clusterService) scheduler = JobScheduler(threadPool, runner) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 3c4fc6425..0edc8ae31 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -25,6 +25,7 @@ import org.opensearch.alerting.util.defaultToPerExecutionAction import org.opensearch.alerting.util.getActionExecutionPolicy import org.opensearch.alerting.util.getBucketKeysHash import org.opensearch.alerting.util.getCombinedTriggerRunResult +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder @@ -39,7 +40,9 @@ import org.opensearch.commons.alerting.model.action.PerAlertActionScope import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import 
org.opensearch.commons.alerting.util.string import org.opensearch.index.query.BoolQueryBuilder +import org.opensearch.index.query.MatchQueryBuilder import org.opensearch.index.query.QueryBuilders +import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.rest.RestStatus import org.opensearch.script.Script import org.opensearch.script.ScriptType @@ -59,7 +62,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowExecutionContext: WorkflowRunContext? ): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -118,7 +122,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitor, periodStart, periodEnd, - monitorResult.inputResults + monitorResult.inputResults, + workflowExecutionContext ) if (firstIteration) { firstPageOfInputResults = inputResults @@ -335,7 +340,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowRunContext: WorkflowRunContext? = null ): List { monitor.inputs.forEach { input -> if (input is SearchInput) { @@ -346,14 +352,14 @@ object BucketLevelMonitorRunner : MonitorRunner() { for (aggFactory in (query.aggregations() as AggregatorFactories.Builder).aggregatorFactories) { when (aggFactory) { is CompositeAggregationBuilder -> { - var grouByFields = 0 // if number of fields used to group by > 1 we won't calculate findings + var groupByFields = 0 // if number of fields used to group by > 1 we won't calculate findings val sources = aggFactory.sources() for (source in sources) { - if (grouByFields > 0) { + if (groupByFields > 0) { logger.error("grouByFields > 0. 
not generating findings for bucket level monitor ${monitor.id}") return listOf() } - grouByFields++ + groupByFields++ fieldName = source.field() } } @@ -389,10 +395,22 @@ object BucketLevelMonitorRunner : MonitorRunner() { val queryBuilder = if (input.query.query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(source.query()) queryBuilder.filter(QueryBuilders.termsQuery(fieldName, bucketValues)) + + if (workflowRunContext != null && !workflowRunContext.indexToDocIds.isNullOrEmpty()) { + workflowRunContext.indexToDocIds.forEach { entry -> + queryBuilder + .should() + .add( + BoolQueryBuilder() + .must(MatchQueryBuilder("_index", entry.key)) + .must(TermsQueryBuilder("_id", entry.value)) + ) + } + } sr.source().query(queryBuilder) } val searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(sr, it) } - return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding) + return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding, workflowRunContext?.workflowExecutionId) } else { logger.error("Couldn't resolve groupBy field. Not generating bucket level monitor findings for monitor %${monitor.id}") } @@ -405,7 +423,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { searchResponse: SearchResponse, monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowExecutionId: String? 
= null ): List { val docIdsByIndexName: MutableMap> = mutableMapOf() for (hit in searchResponse.hits.hits) { @@ -424,7 +443,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorName = monitor.name, index = it.key, timestamp = Instant.now(), - docLevelQueries = listOf() + docLevelQueries = listOf(), + workflowExecutionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index c45548c6b..fa7913670 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -25,6 +25,7 @@ import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.defaultToPerExecutionAction import org.opensearch.alerting.util.getActionExecutionPolicy import org.opensearch.alerting.util.updateMonitorMetadata +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.Client import org.opensearch.cluster.routing.ShardRouting import org.opensearch.cluster.service.ClusterService @@ -63,7 +64,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowRunContext: WorkflowRunContext? 
): MonitorRunResult { logger.debug("Document-level-monitor is running ...") var monitorResult = MonitorRunResult(monitor.name, periodStart, periodEnd) @@ -125,6 +127,9 @@ object DocumentLevelMonitorRunner : MonitorRunner() { } } + // If monitor execution is triggered from a workflow + val indexToRelatedDocIdsMap = workflowRunContext?.indexToDocIds + indices.forEach { indexName -> // Prepare lastRunContext for each index val indexLastRunContext = lastRunContext.getOrPut(indexName) { @@ -154,7 +159,13 @@ object DocumentLevelMonitorRunner : MonitorRunner() { // Prepare DocumentExecutionContext for each index val docExecutionContext = DocumentExecutionContext(queries, indexLastRunContext, indexUpdatedRunContext) - val matchingDocs = getMatchingDocs(monitor, monitorCtx, docExecutionContext, indexName) + val matchingDocs = getMatchingDocs( + monitor, + monitorCtx, + docExecutionContext, + indexName, + indexToRelatedDocIdsMap?.get(index) + ) if (matchingDocs.isNotEmpty()) { val matchedQueriesForDocs = getMatchedQueries(monitorCtx, matchingDocs.map { it.second }, monitor, indexName) @@ -202,7 +213,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { idQueryMap, docsToQueries, queryToDocIds, - dryrun + dryrun, + workflowRunContext?.workflowExecutionId ) } @@ -223,7 +235,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { idQueryMap: Map, docsToQueries: Map>, queryToDocIds: Map>, - dryrun: Boolean + dryrun: Boolean, + workflowExecutionId: String? = null ): DocumentLevelTriggerRunResult { val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) val triggerResult = monitorCtx.triggerService!!.runDocLevelTrigger(monitor, trigger, queryToDocIds) @@ -234,7 +247,14 @@ object DocumentLevelMonitorRunner : MonitorRunner() { // TODO: Implement throttling for findings docsToQueries.forEach { val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! 
} - val findingId = createFindings(monitor, monitorCtx, triggeredQueries, it.key, !dryrun && monitor.id != Monitor.NO_ID) + val findingId = createFindings( + monitor, + monitorCtx, + triggeredQueries, + it.key, + !dryrun && monitor.id != Monitor.NO_ID, + workflowExecutionId + ) findings.add(findingId) if (triggerResult.triggeredDocs.contains(it.key)) { @@ -304,7 +324,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, docLevelQueries: List, matchingDocId: String, - shouldCreateFinding: Boolean + shouldCreateFinding: Boolean, + workflowExecutionId: String? = null, ): String { // Before the "|" is the doc id and after the "|" is the index val docIndex = matchingDocId.split("|") @@ -316,7 +337,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitorName = monitor.name, index = docIndex[1], docLevelQueries = docLevelQueries, - timestamp = Instant.now() + timestamp = Instant.now(), + workflowExecutionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() @@ -433,7 +455,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, docExecutionCtx: DocumentExecutionContext, - index: String + index: String, + docIds: List? = null ): List> { val count: Int = docExecutionCtx.updatedLastRunContext["shards_count"] as Int val matchingDocs = mutableListOf>() @@ -449,7 +472,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { shard, prevSeqNo, maxSeqNo, - null + null, + docIds ) if (hits.hits.isNotEmpty()) { @@ -468,7 +492,8 @@ object DocumentLevelMonitorRunner : MonitorRunner() { shard: String, prevSeqNo: Long?, maxSeqNo: Long, - query: String? + query: String?, + docIds: List? 
= null ): SearchHits { if (prevSeqNo?.equals(maxSeqNo) == true && maxSeqNo != 0L) { return SearchHits.empty() @@ -480,6 +505,10 @@ object DocumentLevelMonitorRunner : MonitorRunner() { boolQueryBuilder.must(QueryBuilders.queryStringQuery(query)) } + if (!docIds.isNullOrEmpty()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) + } + val request: SearchRequest = SearchRequest() .indices(index) .preference("_shards:$shard") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index b7e86ee90..d5e4782e0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -16,6 +16,7 @@ import org.opensearch.alerting.util.AggregationQueryRewriter import org.opensearch.alerting.util.addUserBackendRolesFilter import org.opensearch.alerting.util.executeTransportAction import org.opensearch.alerting.util.toMap +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.Client import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput @@ -26,6 +27,10 @@ import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.model.ClusterMetricsInput import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.index.query.BoolQueryBuilder +import org.opensearch.index.query.MatchQueryBuilder +import org.opensearch.index.query.QueryBuilders +import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.script.ScriptType @@ -47,12 +52,16 @@ class InputService( monitor: Monitor, periodStart: Instant, periodEnd: Instant, - prevResult: InputRunResults? = null + prevResult: InputRunResults? 
= null, + workflowRunContext: WorkflowRunContext? = null ): InputRunResults { return try { val results = mutableListOf>() val aggTriggerAfterKey: MutableMap = mutableMapOf() + // If monitor execution is triggered from a workflow + val indexToDocIds = workflowRunContext?.indexToDocIds + // TODO: If/when multiple input queries are supported for Bucket-Level Monitor execution, aggTriggerAfterKeys will // need to be updated to account for it monitor.inputs.forEach { input -> @@ -78,7 +87,22 @@ class InputService( val searchRequest = SearchRequest().indices(*input.indices.toTypedArray()) XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource).use { - searchRequest.source(SearchSourceBuilder.fromXContent(it)) + if (indexToDocIds.isNullOrEmpty()) { + searchRequest.source(SearchSourceBuilder.fromXContent(it)) + } else { + val source = SearchSourceBuilder.fromXContent(it) + val queryBuilder = QueryBuilders.boolQuery().must(source.query()) + indexToDocIds.forEach { entry -> + queryBuilder + .should() + .add( + BoolQueryBuilder() + .must(MatchQueryBuilder("_index", entry.key)) + .must(TermsQueryBuilder("_id", entry.value)) + ) + } + searchRequest.source(SearchSourceBuilder().query(queryBuilder)) + } } val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } aggTriggerAfterKey += AggregationQueryRewriter.getAfterKeysFromSearchResponse( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt index c7887e466..f234a6eb0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt @@ -26,6 +26,7 @@ import org.opensearch.alerting.util.destinationmigration.publishLegacyNotificati import org.opensearch.alerting.util.destinationmigration.sendNotification import org.opensearch.alerting.util.isAllowed import 
org.opensearch.alerting.util.isTestAction +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.client.node.NodeClient import org.opensearch.common.Strings import org.opensearch.commons.alerting.model.Monitor @@ -41,7 +42,8 @@ abstract class MonitorRunner { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryRun: Boolean + dryRun: Boolean, + workflowRunContext: WorkflowRunContext? = null ): MonitorRunResult<*> suspend fun runAction( diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt index 55624d66e..e68512351 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt @@ -33,6 +33,7 @@ data class MonitorRunnerExecutionContext( var triggerService: TriggerService? = null, var alertService: AlertService? = null, var docLevelMonitorQueries: DocLevelMonitorQueries? = null, + var workflowService: WorkflowService? = null, @Volatile var retryPolicy: BackoffPolicy? = null, @Volatile var moveAlertsRetryPolicy: BackoffPolicy? 
= null, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt index 9864d4a9d..faa2ffde6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt @@ -12,6 +12,7 @@ import org.opensearch.alerting.opensearchapi.InjectorContextElement import org.opensearch.alerting.opensearchapi.withClosableContext import org.opensearch.alerting.script.QueryLevelTriggerExecutionContext import org.opensearch.alerting.util.isADMonitor +import org.opensearch.alerting.workflow.WorkflowRunContext import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.QueryLevelTrigger @@ -25,7 +26,8 @@ object QueryLevelMonitorRunner : MonitorRunner() { monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, - dryrun: Boolean + dryrun: Boolean, + workflowRunContext: WorkflowRunContext? 
): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -48,7 +50,7 @@ object QueryLevelMonitorRunner : MonitorRunner() { if (!isADMonitor(monitor)) { withClosableContext(InjectorContextElement(monitor.id, monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, roles)) { monitorResult = monitorResult.copy( - inputResults = monitorCtx.inputService!!.collectInputResults(monitor, periodStart, periodEnd) + inputResults = monitorCtx.inputService!!.collectInputResults(monitor, periodStart, periodEnd, null, workflowRunContext) ) } } else { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt new file mode 100644 index 000000000..2d06039c3 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -0,0 +1,177 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchException +import org.opensearch.action.get.GetRequest +import org.opensearch.action.get.GetResponse +import org.opensearch.action.search.SearchRequest +import org.opensearch.action.search.SearchResponse +import org.opensearch.alerting.opensearchapi.suspendUntil +import org.opensearch.alerting.util.AlertingException +import org.opensearch.client.Client +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.model.Finding 
+import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.index.query.BoolQueryBuilder +import org.opensearch.index.query.MatchQueryBuilder +import org.opensearch.index.query.QueryBuilders +import org.opensearch.index.query.TermsQueryBuilder +import org.opensearch.search.builder.SearchSourceBuilder +import java.util.stream.Collectors + +private val log = LogManager.getLogger(WorkflowService::class.java) + +class WorkflowService( + val client: Client, + val xContentRegistry: NamedXContentRegistry, +) { + + suspend fun getFindingDocIdsPerMonitorExecution(chainedMonitor: Monitor, workflowExecutionId: String): Map> { + // Search findings index per monitor and workflow execution id + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, chainedMonitor.id)) + .filter(QueryBuilders.termQuery(Finding.WORKFLOW_EXECUTION_ID_FIELD, workflowExecutionId)) + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + ) + .indices(chainedMonitor.dataSources.findingsIndex) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + + // Get the findings docs + val findings = mutableListOf() + for (hit in searchResponse.hits) { + val xcp = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val finding = Finding.parse(xcp) + findings.add(finding) + } + // Based on the findings get the document ids + val indexToRelatedDocIdsMap = mutableMapOf>() + for (finding in findings) { + indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds) + } + return indexToRelatedDocIdsMap + } + + suspend fun searchMonitors(monitors: List, size: Int, owner: 
String?): List { + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors)) + + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + .size(size) + ) + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + return buildMonitors(searchResponse) + } + + private fun buildMonitors(response: SearchResponse): List { + if (response.isTimedOut) { + log.error("Request for getting monitors timeout") + throw OpenSearchException("Cannot determine that the ${ScheduledJob.SCHEDULED_JOBS_INDEX} index is healthy") + } + val monitors = mutableListOf() + try { + for (hit in response.hits) { + XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.sourceAsString + ).use { hitsParser -> + val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) as Monitor + monitors.add(monitor) + } + } + } catch (e: Exception) { + log.error("Error parsing monitors: ${e.message}") + throw AlertingException.wrap(e) + } + return monitors + } + + suspend fun getDocIdsPerFindingIndex(monitorId: String, workflowExecutionId: String): Map> { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) + + val getResponse: GetResponse = client.suspendUntil { + client.get(getRequest, it) + } + + val monitor = if (!getResponse.isSourceEmpty) { + XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, XContentType.JSON + ).use { xcp -> + ScheduledJob.parse(xcp, getResponse.id, getResponse.version) as Monitor + } + } else throw IllegalStateException("Delegate monitors don't exist $monitorId") + // Search findings index per monitor and workflow execution id + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, monitor.id)) + 
.filter(QueryBuilders.termQuery(Finding.WORKFLOW_EXECUTION_ID_FIELD, workflowExecutionId)) + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + ) + .indices(monitor.dataSources.findingsIndex) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + + // Get the findings docs + val findings = mutableListOf() + for (hit in searchResponse.hits) { + val xcp = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val finding = Finding.parse(xcp) + findings.add(finding) + } + + val indexToRelatedDocIdsMap = mutableMapOf>() + + for (finding in findings) { + indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds) + } + + val toTypedArray = indexToRelatedDocIdsMap.keys.stream().collect(Collectors.toList()).toTypedArray() + val searchFindings = SearchRequest().indices(*toTypedArray) + val queryBuilder = QueryBuilders.boolQuery() + indexToRelatedDocIdsMap.forEach { entry -> + queryBuilder + .should() + .add( + BoolQueryBuilder() + .must(MatchQueryBuilder("_index", entry.key)) + .must(TermsQueryBuilder("_id", entry.value)) + ) + } + searchFindings.source(SearchSourceBuilder().query(queryBuilder)) + val finalQueryResponse: SearchResponse = client.suspendUntil { client.search(searchFindings, it) } + + val indexDocIds = mutableMapOf>() + for (hit in finalQueryResponse.hits) { + indexDocIds.getOrPut(hit.index) { mutableListOf() }.add(hit.id) + } + return indexDocIds + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt new file mode 100644 index 000000000..a6e986ea3 --- /dev/null +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowAction.kt @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.action + +import org.opensearch.action.ActionType + +class ExecuteWorkflowAction private constructor() : ActionType(NAME, ::ExecuteWorkflowResponse) { + companion object { + val INSTANCE = ExecuteWorkflowAction() + const val NAME = "cluster:admin/opendistro/alerting/workflow/execute" + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt new file mode 100644 index 000000000..26258dd7f --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.common.io.stream.StreamInput +import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.alerting.model.Workflow +import java.io.IOException + +class ExecuteWorkflowRequest : ActionRequest { + val dryrun: Boolean + val requestEnd: TimeValue + val workflowId: String? + val workflow: Workflow? + + constructor( + dryrun: Boolean, + requestEnd: TimeValue, + workflowId: String?, + workflow: Workflow? + ) : super() { + this.dryrun = dryrun + this.requestEnd = requestEnd + this.workflowId = workflowId + this.workflow = workflow + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readBoolean(), + sin.readTimeValue(), + sin.readOptionalString(), + if (sin.readBoolean()) { + Workflow.readFrom(sin) + } else null + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeBoolean(dryrun) + out.writeTimeValue(requestEnd) + out.writeOptionalString(workflowId) + if (workflow != null) { + out.writeBoolean(true) + workflow.writeTo(out) + } else { + out.writeBoolean(false) + } + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt new file mode 100644 index 000000000..440fa2984 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.action + +import org.opensearch.action.ActionResponse +import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.common.io.stream.StreamInput +import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.common.xcontent.ToXContent +import org.opensearch.common.xcontent.ToXContentObject +import org.opensearch.common.xcontent.XContentBuilder +import java.io.IOException + +class ExecuteWorkflowResponse : ActionResponse, ToXContentObject { + + val workflowRunResult: List> + + constructor(monitorRunResult: List>) : super() { + this.workflowRunResult = monitorRunResult + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) } + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeList(workflowRunResult) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startArray() + for (monitorResult in workflowRunResult) { + monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) + } + builder.endArray() + return builder + } +} diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt new file mode 100644 index 000000000..5f400615c --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt @@ -0,0 +1,11 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.model + +class WorkflowMetadata( + val chainedFindingMonitorId: String?, + val executionId: String? +) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt new file mode 100644 index 000000000..42367971f --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt @@ -0,0 +1,120 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.transport + +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.launch +import kotlinx.coroutines.withContext +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchStatusException +import org.opensearch.action.ActionListener +import org.opensearch.action.get.GetRequest +import org.opensearch.action.get.GetResponse +import org.opensearch.action.support.ActionFilters +import org.opensearch.action.support.HandledTransportAction +import org.opensearch.alerting.MonitorRunnerService +import org.opensearch.alerting.action.ExecuteWorkflowAction +import org.opensearch.alerting.action.ExecuteWorkflowRequest +import org.opensearch.alerting.action.ExecuteWorkflowResponse +import org.opensearch.alerting.util.AlertingException +import org.opensearch.alerting.workflow.WorkflowRunnerService +import org.opensearch.client.Client +import org.opensearch.common.inject.Inject +import 
org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.NamedXContentRegistry +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.ConfigConstants +import org.opensearch.commons.alerting.model.ScheduledJob +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.authuser.User +import org.opensearch.rest.RestStatus +import org.opensearch.tasks.Task +import org.opensearch.transport.TransportService +import java.time.Instant + +private val log = LogManager.getLogger(TransportExecuteWorkflowAction::class.java) + +class TransportExecuteWorkflowAction @Inject constructor( + transportService: TransportService, + private val client: Client, + private val runner: MonitorRunnerService, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, +) : HandledTransportAction( + ExecuteWorkflowAction.NAME, transportService, actionFilters, ::ExecuteWorkflowRequest +) { + override fun doExecute(task: Task, execWorkflowRequest: ExecuteWorkflowRequest, actionListener: ActionListener) { + val userStr = client.threadPool().threadContext.getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) + log.debug("User and roles string from thread context: $userStr") + val user: User? = User.parse(userStr) + + client.threadPool().threadContext.stashContext().use { + val executeWorkflow = fun(workflow: Workflow) { + // Launch the coroutine with the client's threadContext. This is needed to preserve authentication information + // stored on the threadContext set by the security plugin when using the Alerting plugin with the Security plugin.
+ // runner.launch(ElasticThreadContextElement(client.threadPool().threadContext)) { + runner.launch { + val (periodStart, periodEnd) = + workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) + try { + val workflowRunResult = + WorkflowRunnerService.runJob(workflow, periodStart, periodEnd, execWorkflowRequest.dryrun) + withContext(Dispatchers.IO) { + actionListener.onResponse(ExecuteWorkflowResponse(workflowRunResult)) + } + } catch (e: Exception) { + log.error("Unexpected error running workflow", e) + withContext(Dispatchers.IO) { + actionListener.onFailure(AlertingException.wrap(e)) + } + } + } + } + + if (execWorkflowRequest.workflowId != null) { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execWorkflowRequest.workflowId) + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Can't find workflow with id: ${response.id}", + RestStatus.NOT_FOUND + ) + ) + ) + return + } + if (!response.isSourceEmpty) { + XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, XContentType.JSON + ).use { xcp -> + val workflow = ScheduledJob.parse(xcp, response.id, response.version) as Workflow + executeWorkflow(workflow) + } + } + } + + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + } + ) + } else { + val workflow = when (user?.name.isNullOrEmpty()) { + true -> execWorkflowRequest.workflow as Workflow + false -> (execWorkflowRequest.workflow as Workflow).copy(user = user) + } + executeWorkflow(workflow) + } + } + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt index 13ed9c9cb..b058622a3 100644 
--- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt @@ -21,6 +21,7 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.authuser.User import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder @@ -48,6 +49,9 @@ class TransportSearchMonitorAction @Inject constructor( } override fun doExecute(task: Task, searchMonitorRequest: SearchMonitorRequest, actionListener: ActionListener) { + if (searchMonitorRequest.searchRequest.indices().isEmpty()) + searchMonitorRequest.searchRequest.indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + val searchSourceBuilder = searchMonitorRequest.searchRequest.source() val queryBuilder = if (searchSourceBuilder.query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(searchSourceBuilder.query()) @@ -97,7 +101,7 @@ class TransportSearchMonitorAction @Inject constructor( var boolQueryBuilder: BoolQueryBuilder = if (searchRequest.source().query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(searchRequest.source().query()) val bqb = BoolQueryBuilder() - bqb.should().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) + bqb.must().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) bqb.should().add(BoolQueryBuilder().must(MatchQueryBuilder("monitor.owner", "alerting"))) boolQueryBuilder.filter(bqb) searchRequest.source().query(boolQueryBuilder) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 2ad4cd23b..4a08411d2 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -1,30 +1,90 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.workflow +import org.opensearch.alerting.BucketLevelMonitorRunner +import org.opensearch.alerting.DocumentLevelMonitorRunner import org.opensearch.alerting.MonitorRunnerExecutionContext +import org.opensearch.alerting.QueryLevelMonitorRunner import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.util.isDocLevelMonitor +import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.util.isBucketLevelMonitor import java.time.Instant +import java.util.UUID + +object CompositeWorkflowRunner : WorkflowRunner() { -class CompositeWorkflowRunner : WorkflowRunner() { override suspend fun runWorkflow( workflow: Workflow, monitorCtx: MonitorRunnerExecutionContext, periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): MonitorRunResult<*> { - TODO("Not yet implemented") - } + ): List> { + val workflowExecutionId = UUID.randomUUID().toString() + + val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + // Fetch monitors by ids + val monitors = monitorCtx.workflowService!!.searchMonitors(delegates.map { it.monitorId }, delegates.size, workflow.owner) + + // Validate the monitors size + if (delegates.size != monitors.size) { + val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() + throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds") + } + + val monitorsById = monitors.associateBy { it.id } + val resultList = mutableListOf>() + + for (delegate in delegates) { + var delegateMonitor = monitorsById[delegate.monitorId] + ?: throw 
IllegalStateException("Delegate monitor not found ${delegate.monitorId}") + + var indexToDocIds = mapOf>() + if (delegate.chainedFindings != null) { + val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] + ?: throw IllegalStateException("Chained finding monitor not found ${delegate.monitorId}") + + indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsPerMonitorExecution(chainedMonitor, workflowExecutionId) + } + + val workflowRunContext = WorkflowRunContext(delegate.chainedFindings?.monitorId, workflowExecutionId, indexToDocIds) - companion object { - fun runWorkflow( - workflow: Workflow, - monitorCtx: MonitorRunnerExecutionContext, - periodStart: Instant, - periodEnd: Instant, - dryrun: Boolean - ): MonitorRunResult<*> { - TODO("Not yet implemented") + val runResult = if (delegateMonitor.isBucketLevelMonitor()) { + BucketLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else if (delegateMonitor.isDocLevelMonitor()) { + DocumentLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else { + QueryLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } + resultList.add(runResult) } + return resultList } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt new file mode 100644 index 000000000..b536b694b --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt @@ -0,0 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.workflow + +data class WorkflowRunContext( + val chainedMonitorId: String?, + val workflowExecutionId: String, + val indexToDocIds: Map> +) diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt index 0fc989800..eb14e75fe 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt @@ -1,3 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + package org.opensearch.alerting.workflow import org.opensearch.alerting.MonitorRunnerExecutionContext @@ -12,5 +17,5 @@ abstract class WorkflowRunner { periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): MonitorRunResult<*> + ): List> } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt index 6379f2f55..44a9fe9b7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt @@ -16,6 +16,7 @@ import org.opensearch.alerting.AlertService import org.opensearch.alerting.InputService import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.TriggerService +import org.opensearch.alerting.WorkflowService import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.core.JobRunner import org.opensearch.alerting.model.MonitorRunResult @@ -97,6 +98,11 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompo return this } + fun registerWorkflowService(workflowService: WorkflowService): WorkflowRunnerService { + monitorCtx.workflowService = workflowService + return this + } + fun registerTriggerService(triggerService: TriggerService): WorkflowRunnerService { monitorCtx.triggerService = triggerService return this @@ -191,7 +197,7 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, 
AbstractLifecycleCompo } } - suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): MonitorRunResult<*> { + suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): List> { val workflow = job as Workflow return CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun) } diff --git a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json index c9386b2ef..fdeb0c2a1 100644 --- a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json +++ b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json @@ -51,6 +51,9 @@ }, "timestamp": { "type": "long" + }, + "workflow_execution_id": { + "type": "keyword" } } } \ No newline at end of file diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt new file mode 100644 index 000000000..1cd4efafb --- /dev/null +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -0,0 +1,282 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting + +import org.junit.Assert +import org.opensearch.action.support.WriteRequest +import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.model.DataSources +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import 
org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Table +import org.opensearch.index.query.QueryBuilders +import org.opensearch.script.Script +import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder +import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder +import org.opensearch.search.builder.SearchSourceBuilder +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit + +class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { + + fun `test execute workflow with custom alerts and finding index with doc level delegates`() { + val docQuery1 = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customAlertsIndex1 = "custom_alerts_index" + val customFindingsIndex1 = "custom_findings_index" + val customFindingsIndexPattern1 = "custom_findings_index-1" + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + dataSources = DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1 + ) + ) + val monitorResponse = createMonitor(monitor1)!! 
+ + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customAlertsIndex2 = "custom_alerts_index_2" + val customFindingsIndex2 = "custom_findings_index_2" + val customFindingsIndexPattern2 = "custom_findings_index-2" + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + dataSources = DataSources( + alertsIndex = customAlertsIndex2, + findingsIndex = customFindingsIndex2, + findingsIndexPattern = customFindingsIndexPattern2 + ) + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + + testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 and monitor2 + val testDoc2 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16645, + "test_strict_date_time" : "$testTime", + "test_field" : "us-west-2" + }""" + indexDoc(index, "2", testDoc2) + + testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Doesn't match + val testDoc3 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 123456, + "test_strict_date_time" : "$testTime", + "test_field" : 
"us-east-1" + }""" + indexDoc(index, "3", testDoc3) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! + val monitorsRunResults = executeWorkflowResponse.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + assertEquals(monitor1.name, monitorsRunResults[0].monitorName) + assertEquals(1, monitorsRunResults[0].triggerResults.size) + + Assert.assertEquals(monitor2.name, monitorsRunResults[1].monitorName) + Assert.assertEquals(1, monitorsRunResults[1].triggerResults.size) + + assertAlerts(monitorResponse, customAlertsIndex1, 2) + assertFindings(monitorResponse.id, customFindingsIndex1, 2, 2, listOf("1", "2")) + + assertAlerts(monitorResponse2, customAlertsIndex2, 1) + assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) + } + + fun `test execute workflow with custom alerts and finding index with doc level and bucket level delegates`() { + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2" + ) + ) + + val query = QueryBuilders.rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = listOf( + TermsValuesSourceBuilder("test_field").field("test_field") + ) + val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) + val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) + val triggerScript = """ + params.docCount > 0 + """.trimIndent() + + var trigger = randomBucketLevelTrigger() + trigger = trigger.copy( + bucketSelector = BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null + ) + ) + val bucketLevelMonitorResponse = 
createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)))!! + + val docQuery = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "3") + val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) + val docLevelTrigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val customAlertsIndex = "custom_alerts_index" + val customFindingsIndexPattern = "custom_findings_index" + + var docLevelMonitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(docLevelTrigger), + dataSources = DataSources( + alertsIndex = customAlertsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(docLevelMonitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
+ assertNotNull(executeWorkflowResponse) + + val bucketLevelResponse = executeWorkflowResponse.workflowRunResult[1] + + assertEquals(bucketLevelMonitorResponse.monitor.name, bucketLevelResponse.monitorName) + val searchResult = bucketLevelResponse.inputResults.results.first() + @Suppress("UNCHECKED_CAST") + val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> + assertEquals("Incorrect search result", 2, buckets.size) + } + + fun `test bucket execution`() { + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2" + ) + ) + + val compositeSources = listOf( + TermsValuesSourceBuilder("test_field").field("test_field") + ) + val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) + val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).aggregation(compositeAgg)) + val triggerScript = """ + params.docCount > 0 + """.trimIndent() + + var trigger = randomBucketLevelTrigger() + trigger = trigger.copy( + bucketSelector = BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null + ) + ) + val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)))!! + val response = executeMonitor(monitor.monitor, monitor.id, false)!! 
+ + assertEquals(monitor.monitor.name, response.monitorRunResult.monitorName) + @Suppress("UNCHECKED_CAST") + val searchResult = (response.monitorRunResult.inputResults.results).first() + @Suppress("UNCHECKED_CAST") + val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> + assertEquals("Incorrect search result", 2, buckets.size) + } + + private fun assertFindings( + monitorId: String, + customFindingsIndex: String, + findingSize: Int, + matchedQueryNumber: Int, + relatedDocIds: List + ) { + val findings = searchFindings(monitorId, customFindingsIndex) + assertEquals("Findings saved for test monitor", findingSize, findings.size) + + val findingDocIds = findings.flatMap { it.relatedDocIds } + + assertEquals("Didn't match $matchedQueryNumber query", matchedQueryNumber, findingDocIds.size) + assertTrue("Findings saved for test monitor", relatedDocIds.containsAll(findingDocIds)) + } + + private fun assertAlerts( + monitorResponse: IndexMonitorResponse, + customAlertsIndex: String, + alertSize: Int + ) { + val monitorId = monitorResponse.id + val alerts = searchAlerts(monitorId, customAlertsIndex) + assertEquals("Alert saved for test monitor", alertSize, alerts.size) + val table = Table("asc", "id", null, alertSize, 0, "") + var getAlertsResponse = client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex) + ) + .get() + assertTrue(getAlertsResponse != null) + assertTrue(getAlertsResponse.alerts.size == alertSize) + getAlertsResponse = client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null)) + .get() + assertTrue(getAlertsResponse != null) + assertTrue(getAlertsResponse.alerts.size == alertSize) + + val alertIds = getAlertsResponse.alerts.map { it.id } + val acknowledgeAlertResponse = client().execute( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, + AcknowledgeAlertRequest(monitorId, 
alertIds, WriteRequest.RefreshPolicy.IMMEDIATE) + ).get() + + assertEquals(alertSize, acknowledgeAlertResponse.acknowledged.size) + } +} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 83ac60912..f235c6e9e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -42,6 +42,9 @@ import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchSingleNodeTestCase import java.time.Instant +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit import java.util.* /** @@ -72,6 +75,28 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { return client().execute(ExecuteMonitorAction.INSTANCE, request).get() } + protected fun insertSampleTimeSerializedData(index: String, data: List) { + data.forEachIndexed { i, value -> + val twoMinsAgo = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.MILLIS) + val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(twoMinsAgo) + val testDoc = """ + { + "test_strict_date_time": "$testTime", + "test_field": "$value", + "number": "$i" + } + """.trimIndent() + // Indexing documents with deterministic doc id to allow for easy selected deletion during testing + indexDoc(index, (i + 1).toString(), testDoc) + } + } + + @Suppress("UNCHECKED_CAST") + fun Map.stringMap(key: String): Map? { + val map = this as Map> + return map[key] + } + /** A test index that can be used across tests. Feel free to add new fields but don't remove any. 
*/ protected fun createTestIndex() { createIndex( diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index d9f36f721..6ccd4c62c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -8,6 +8,10 @@ package org.opensearch.alerting.transport import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope import org.opensearch.action.support.WriteRequest import org.opensearch.common.xcontent.XContentParser +import org.opensearch.alerting.action.ExecuteWorkflowAction +import org.opensearch.alerting.action.ExecuteWorkflowRequest +import org.opensearch.alerting.action.ExecuteWorkflowResponse +import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions import org.opensearch.commons.alerting.action.DeleteWorkflowRequest @@ -22,6 +26,7 @@ import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.rest.RestRequest import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.fetch.subphase.FetchSourceContext +import java.time.Instant /** * A test that keep a singleton node started for all tests that can be used to get @@ -83,4 +88,9 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { DeleteWorkflowRequest(workflowId, WriteRequest.RefreshPolicy.IMMEDIATE) ).get() } + + protected fun executeWorkflow(workflow: Workflow, id: String, dryRun: Boolean = true): ExecuteWorkflowResponse? 
{ + val request = ExecuteWorkflowRequest(dryRun, TimeValue(Instant.now().toEpochMilli()), id, workflow) + return client().execute(ExecuteWorkflowAction.INSTANCE, request).get() + } } diff --git a/build.gradle b/build.gradle index 6cb2b83e6..fe7ed38bc 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ buildscript { apply from: 'build-tools/repositories.gradle' ext { - opensearch_version = System.getProperty("opensearch.version", "2.4.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.5.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") // 2.4.0-SNAPSHOT -> 2.4.0.0-SNAPSHOT From c2588a0d21eb20dffc60d89090584a608031014c Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Feb 2023 10:02:47 +0100 Subject: [PATCH 06/18] Removed unused classes Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/model/WorkflowMetadata.kt | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt deleted file mode 100644 index 5f400615c..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.model - -class WorkflowMetadata( - val chainedFindingMonitorId: String?, - val executionId: String? 
-) From ad01b708ac7cf2d2717a991a4fcf16130a9ad304 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Feb 2023 19:35:51 +0100 Subject: [PATCH 07/18] Added rest action for executing the workflow Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 6 +- .../alerting/BucketLevelMonitorRunner.kt | 3 +- .../resthandler/RestExecuteWorkflowAction.kt | 74 +++++++++++++++++++ .../transport/TransportSearchMonitorAction.kt | 6 +- .../opensearch/alerting/WorkflowRunnerIT.kt | 2 +- 5 files changed, 83 insertions(+), 8 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 0b8e121a7..b6c41c24c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -28,6 +28,7 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestExecuteMonitorAction +import org.opensearch.alerting.resthandler.RestExecuteWorkflowAction import org.opensearch.alerting.resthandler.RestGetAlertsAction import org.opensearch.alerting.resthandler.RestGetDestinationsAction import org.opensearch.alerting.resthandler.RestGetEmailAccountAction @@ -124,8 +125,10 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R @JvmField val OPEN_SEARCH_DASHBOARDS_USER_AGENT = "OpenSearch-Dashboards" @JvmField val UI_METADATA_EXCLUDE = arrayOf("monitor.${Monitor.UI_METADATA_FIELD}") @JvmField val MONITOR_BASE_URI = "/_plugins/_alerting/monitors" + @JvmField val WORKFLOW_BASE_URI = "/_plugins/_alerting/workflows" @JvmField val DESTINATION_BASE_URI = 
"/_plugins/_alerting/destinations" @JvmField val LEGACY_OPENDISTRO_MONITOR_BASE_URI = "/_opendistro/_alerting/monitors" + @JvmField val LEGACY_OPENDISTRO_WORKFLOW_BASE_URI = "/_opendistro/_alerting/workflows" @JvmField val LEGACY_OPENDISTRO_DESTINATION_BASE_URI = "/_opendistro/_alerting/destinations" @JvmField val EMAIL_ACCOUNT_BASE_URI = "$DESTINATION_BASE_URI/email_accounts" @JvmField val EMAIL_GROUP_BASE_URI = "$DESTINATION_BASE_URI/email_groups" @@ -169,7 +172,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestGetEmailGroupAction(), RestGetDestinationsAction(), RestGetAlertsAction(), - RestGetFindingsAction() + RestGetFindingsAction(), + RestExecuteWorkflowAction() ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 0edc8ae31..422ca24a0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -159,7 +159,8 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx, periodStart, periodEnd, - !dryrun && monitor.id != Monitor.NO_ID + !dryrun && monitor.id != Monitor.NO_ID, + workflowExecutionContext ) } else { emptyList() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt new file mode 100644 index 000000000..ad2990d67 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt @@ -0,0 +1,74 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.resthandler + +import org.apache.logging.log4j.LogManager +import org.opensearch.alerting.AlertingPlugin +import org.opensearch.alerting.action.ExecuteWorkflowAction +import 
org.opensearch.alerting.action.ExecuteWorkflowRequest +import org.opensearch.client.node.NodeClient +import org.opensearch.common.unit.TimeValue +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.rest.BaseRestHandler +import org.opensearch.rest.RestHandler +import org.opensearch.rest.RestRequest +import org.opensearch.rest.action.RestToXContentListener +import java.time.Instant + +private val log = LogManager.getLogger(RestExecuteWorkflowAction::class.java) + +class RestExecuteWorkflowAction : BaseRestHandler() { + + override fun getName(): String = "execute_workflow_action" + + override fun routes(): List { + return listOf() + } + + override fun replacedRoutes(): MutableList { + return mutableListOf( + RestHandler.ReplacedRoute( + RestRequest.Method.POST, + "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_execute", + RestRequest.Method.POST, + "${AlertingPlugin.LEGACY_OPENDISTRO_WORKFLOW_BASE_URI}/{workflowID}/_execute" + ), + RestHandler.ReplacedRoute( + RestRequest.Method.POST, + "${AlertingPlugin.WORKFLOW_BASE_URI}/_execute", + RestRequest.Method.POST, + "${AlertingPlugin.LEGACY_OPENDISTRO_WORKFLOW_BASE_URI}/_execute" + ) + ) + } + + override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/_execute") + + return RestChannelConsumer { channel -> + val dryrun = request.paramAsBoolean("dryrun", false) + val requestEnd = request.paramAsTime("period_end", TimeValue(Instant.now().toEpochMilli())) + + if (request.hasParam("workflowID")) { + val workflowId = request.param("workflowID") + val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, workflowId, null) + client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) + } else { + val xcp = request.contentParser() + 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val workflow = Workflow.parse(xcp, Workflow.NO_ID, Workflow.NO_VERSION) + val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, null, workflow) + client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) + } + } + } + + override fun responseParams(): Set { + return setOf("dryrun", "period_end", "workflowID") + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt index b058622a3..13ed9c9cb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt @@ -21,7 +21,6 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.commons.alerting.model.Monitor -import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.authuser.User import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder @@ -49,9 +48,6 @@ class TransportSearchMonitorAction @Inject constructor( } override fun doExecute(task: Task, searchMonitorRequest: SearchMonitorRequest, actionListener: ActionListener) { - if (searchMonitorRequest.searchRequest.indices().isEmpty()) - searchMonitorRequest.searchRequest.indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - val searchSourceBuilder = searchMonitorRequest.searchRequest.source() val queryBuilder = if (searchSourceBuilder.query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(searchSourceBuilder.query()) @@ -101,7 +97,7 @@ class TransportSearchMonitorAction @Inject constructor( var boolQueryBuilder: BoolQueryBuilder = if 
(searchRequest.source().query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(searchRequest.source().query()) val bqb = BoolQueryBuilder() - bqb.must().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) + bqb.should().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) bqb.should().add(BoolQueryBuilder().must(MatchQueryBuilder("monitor.owner", "alerting"))) boolQueryBuilder.filter(bqb) searchRequest.source().query(boolQueryBuilder) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index 1cd4efafb..b40c30955 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -96,7 +96,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { // Doesn't match val testDoc3 = """{ "message" : "This is an error from IAD region", - "source.ip.v6.v2" : 123456, + "source.ip.v6.v2" : 16645, "test_strict_date_time" : "$testTime", "test_field" : "us-east-1" }""" From 519072650bffec41caaf4f1ffce4c811033eb37a Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Thu, 23 Feb 2023 18:02:25 +0100 Subject: [PATCH 08/18] Added integration tests for workflow execution. 
Added script modules when loading the cluster Signed-off-by: Stevan Buzejic --- alerting/build.gradle | 2 + .../org/opensearch/alerting/InputService.kt | 47 ++- .../alerting/MonitorRunnerServiceIT.kt | 7 +- .../org/opensearch/alerting/TestHelpers.kt | 1 + .../opensearch/alerting/WorkflowRunnerIT.kt | 275 ++++++++++++++---- .../transport/AlertingSingleNodeTestCase.kt | 44 ++- 6 files changed, 296 insertions(+), 80 deletions(-) diff --git a/alerting/build.gradle b/alerting/build.gradle index f0499d421..5488525e2 100644 --- a/alerting/build.gradle +++ b/alerting/build.gradle @@ -94,6 +94,8 @@ dependencies { testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" testImplementation "org.mockito:mockito-core:4.7.0" testImplementation "org.opensearch.plugin:reindex-client:${opensearch_version}" + testImplementation "org.opensearch.plugin:lang-painless:${opensearch_version}" + testImplementation "org.opensearch.plugin:lang-mustache-client:${opensearch_version}" } javadoc.enabled = false // turn off javadoc as it barfs on Kotlin code diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index d5e4782e0..e52542ae9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -29,6 +29,7 @@ import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.MatchQueryBuilder +import org.opensearch.index.query.QueryBuilder import org.opensearch.index.query.QueryBuilders import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.script.Script @@ -72,6 +73,13 @@ class InputService( "period_start" to periodStart.toEpochMilli(), "period_end" to periodEnd.toEpochMilli() ) + + // Rewrite query to consider the doc ids per given index + if 
(chainedFindingExist(indexToDocIds)) { + val updatedSourceQuery = updateInputQueryWithFindingDocIds(input.query.query(), indexToDocIds!!) + input.query.query(updatedSourceQuery) + } + // Deep copying query before passing it to rewriteQuery since otherwise, the monitor.input is modified directly // which causes a strange bug where the rewritten query persists on the Monitor across executions val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) @@ -87,22 +95,7 @@ class InputService( val searchRequest = SearchRequest().indices(*input.indices.toTypedArray()) XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource).use { - if (indexToDocIds.isNullOrEmpty()) { - searchRequest.source(SearchSourceBuilder.fromXContent(it)) - } else { - val source = SearchSourceBuilder.fromXContent(it) - val queryBuilder = QueryBuilders.boolQuery().must(source.query()) - indexToDocIds.forEach { entry -> - queryBuilder - .should() - .add( - BoolQueryBuilder() - .must(MatchQueryBuilder("_index", entry.key)) - .must(TermsQueryBuilder("_id", entry.value)) - ) - } - searchRequest.source(SearchSourceBuilder().query(queryBuilder)) - } + searchRequest.source(SearchSourceBuilder.fromXContent(it)) } val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } aggTriggerAfterKey += AggregationQueryRewriter.getAfterKeysFromSearchResponse( @@ -129,6 +122,28 @@ class InputService( } } + private fun updateInputQueryWithFindingDocIds( + query: QueryBuilder, + indexToDocIds: Map>, + ): QueryBuilder { + val queryBuilder = QueryBuilders.boolQuery().must(query) + val shouldQuery = QueryBuilders.boolQuery() + + indexToDocIds.forEach { entry -> + shouldQuery + .should() + .add( + BoolQueryBuilder() + .must(MatchQueryBuilder("_index", entry.key)) + .must(TermsQueryBuilder("_id", entry.value)) + ) + } + return queryBuilder.must(shouldQuery) + } + + private fun 
chainedFindingExist(indexToDocIds: Map>?) = + !indexToDocIds.isNullOrEmpty() + private fun deepCopyQuery(query: SearchSourceBuilder): SearchSourceBuilder { val out = BytesStreamOutput() query.writeTo(out) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt index 4ce7dcd23..289b3b1e6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt @@ -1329,7 +1329,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" + "test_value_1", // adding duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3" ) ) @@ -1340,7 +1343,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val termAgg = TermsAggregationBuilder("test_field").field("test_field") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(termAgg)) val triggerScript = """ - params.docCount > 0 + params.docCount > 1 """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index f858d7831..d06f6fa91 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -237,6 +237,7 @@ fun randomWorkflowMonitor( if (!monitorIds.isNullOrEmpty()) { delegates.add(Delegate(1, monitorIds[0])) for (i in 1 until monitorIds.size) { + // Order of monitors in workflow will be the same like forwarded meaning that the first monitorId will be used as second monitor chained finding delegates.add(Delegate(i + 1, monitorIds [i], 
ChainedFindings(monitorIds[i - 1]))) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index b40c30955..bebc17c39 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -7,6 +7,7 @@ package org.opensearch.alerting import org.junit.Assert import org.opensearch.action.support.WriteRequest +import org.opensearch.alerting.model.DocumentLevelTriggerRunResult import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest import org.opensearch.commons.alerting.action.AlertingActions @@ -120,16 +121,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) } - fun `test execute workflow with custom alerts and finding index with doc level and bucket level delegates`() { - insertSampleTimeSerializedData( - index, - listOf( - "test_value_1", - "test_value_1", // adding duplicate to verify aggregation - "test_value_2" - ) - ) - + fun `test execute workflow with custom alerts and finding index with bucket level doc level delegates when bucket level delegate is used in chained finding`() { val query = QueryBuilders.rangeQuery("test_strict_date_time") .gt("{{period_end}}||-10d") .lte("{{period_end}}") @@ -139,8 +131,9 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) + // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping val triggerScript = """ - params.docCount > 0 + params.docCount > 1 """.trimIndent() 
var trigger = randomBucketLevelTrigger() @@ -150,65 +143,125 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { bucketsPathsMap = mapOf("docCount" to "_count"), script = Script(triggerScript), parentBucketPath = "composite_agg", - filter = null + filter = null, ) ) - val bucketLevelMonitorResponse = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)))!! - - val docQuery = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "3") - val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) - val docLevelTrigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - - val customAlertsIndex = "custom_alerts_index" - val customFindingsIndexPattern = "custom_findings_index" - + val bucketCustomAlertsIndex = "custom_alerts_index" + val bucketCustomFindingsIndex = "custom_findings_index" + val bucketCustomFindingsIndexPattern = "custom_findings_index-1" + + val bucketLevelMonitorResponse = createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = bucketCustomAlertsIndex, + findingsIndex = bucketCustomFindingsIndex, + findingsIndexPattern = bucketCustomFindingsIndexPattern + ) + ) + )!! 
+ + val docQuery1 = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "1") + val docQuery2 = DocLevelQuery(query = "test_field:\"test_value_1\"", name = "2") + val docQuery3 = DocLevelQuery(query = "test_field:\"test_value_3\"", name = "3") + val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2, docQuery3)) + val docTrigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val docCustomAlertsIndex = "custom_alerts_index" + val docCustomFindingsIndex = "custom_findings_index" + val docCustomFindingsIndexPattern = "custom_findings_index-1" var docLevelMonitor = randomDocumentLevelMonitor( inputs = listOf(docLevelInput), - triggers = listOf(docLevelTrigger), + triggers = listOf(docTrigger), dataSources = DataSources( - alertsIndex = customAlertsIndex, - findingsIndexPattern = customFindingsIndexPattern + alertsIndex = docCustomAlertsIndex, + findingsIndex = docCustomFindingsIndex, + findingsIndexPattern = docCustomFindingsIndexPattern ) ) - val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! + val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! + // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) var workflow = randomWorkflowMonitor( - monitorIds = listOf(docLevelMonitorResponse.id) + monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id)!! assertNotNull(workflowById) + // Creates 5 documents + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3" + ) + ) + val workflowId = workflowResponse.id + // 1. bucket level monitor should reduce the doc findings to 4 (1, 2, 3, 4) + // 2. 
Doc level monitor will match those 4 documents although it contains rules for matching all 5 documents (docQuery3 matches the fifth) val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! assertNotNull(executeWorkflowResponse) - val bucketLevelResponse = executeWorkflowResponse.workflowRunResult[1] - - assertEquals(bucketLevelMonitorResponse.monitor.name, bucketLevelResponse.monitorName) - val searchResult = bucketLevelResponse.inputResults.results.first() - @Suppress("UNCHECKED_CAST") - val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> - assertEquals("Incorrect search result", 2, buckets.size) + for (monitorRunResults in executeWorkflowResponse.workflowRunResult) { + if (bucketLevelMonitorResponse.monitor.name == monitorRunResults.monitorName) { + val searchResult = monitorRunResults.inputResults.results.first() + @Suppress("UNCHECKED_CAST") + val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> + assertEquals("Incorrect search result", 3, buckets.size) + + assertAlerts(bucketLevelMonitorResponse, bucketCustomAlertsIndex, 2) + assertFindings(bucketLevelMonitorResponse.id, bucketCustomFindingsIndex, 1, 4, listOf("1", "2", "3", "4")) + } else { + assertEquals(1, monitorRunResults.inputResults.results.size) + val values = monitorRunResults.triggerResults.values + assertEquals(1, values.size) + @Suppress("UNCHECKED_CAST") + val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult + val triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] } + val expectedTriggeredDocIds = listOf("1", "2", "3", "4") + assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted()) + + assertAlerts(docLevelMonitorResponse, docCustomAlertsIndex, 4) + assertFindings(docLevelMonitorResponse.id, docCustomFindingsIndex, 4, 4, listOf("1", "2", "3", "4")) + } + } } - fun `test bucket execution`() { - 
insertSampleTimeSerializedData( - index, - listOf( - "test_value_1", - "test_value_1", // adding duplicate to verify aggregation - "test_value_2" + fun `test execute workflow with custom alerts and finding index with bucket level and doc level delegates when doc level delegate is used in chained finding`() { + val docQuery1 = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "1") + val docQuery2 = DocLevelQuery(query = "test_field:\"test_value_3\"", name = "2") + + var docLevelMonitor = randomDocumentLevelMonitor( + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = DataSources( + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1" ) ) + val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! + + val query = QueryBuilders.rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val compositeSources = listOf( TermsValuesSourceBuilder("test_field").field("test_field") ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) - val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).aggregation(compositeAgg)) + val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) + // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping val triggerScript = """ - params.docCount > 0 + params.docCount > 1 """.trimIndent() var trigger = randomBucketLevelTrigger() @@ -218,18 +271,136 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { bucketsPathsMap = mapOf("docCount" to "_count"), script = Script(triggerScript), parentBucketPath = "composite_agg", - filter = null + 
filter = null, + ) + ) + + val bucketLevelMonitorResponse = createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1" + ) + ) + )!! + + var docLevelMonitor1 = randomDocumentLevelMonitor( + // Match the documents with test_field: test_value_3 + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index_1", + findingsIndex = "custom_findings_index_1", + findingsIndexPattern = "custom_findings_index_1-1" + ) + ) + + val docLevelMonitorResponse1 = createMonitor(docLevelMonitor1)!! + + val queryMonitorInput = SearchInput( + indices = listOf(index), + query = SearchSourceBuilder().query( + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + ) + ) + val queryTriggerScript = """ + return ctx.results[0].hits.hits.size() > 0 + """.trimIndent() + + val queryLevelTrigger = randomQueryLevelTrigger(condition = Script(queryTriggerScript)) + val queryMonitorResponse = createMonitor(randomQueryLevelMonitor(inputs = listOf(queryMonitorInput), triggers = listOf(queryLevelTrigger)))!! + + // 1. docMonitor (chainedFinding = null) 2. bucketMonitor (chainedFinding = docMonitor) 3. docMonitor (chainedFinding = bucketMonitor) 4. queryMonitor (chainedFinding = docMonitor 3) + var workflow = randomWorkflowMonitor( + monitorIds = listOf(docLevelMonitorResponse.id, bucketLevelMonitorResponse.id, docLevelMonitorResponse1.id, queryMonitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id)!! 
+ assertNotNull(workflowById) + + // Creates 5 documents + insertSampleTimeSerializedData( + index, + listOf( + "test_value_1", + "test_value_1", // adding duplicate to verify aggregation + "test_value_2", + "test_value_2", + "test_value_3", + "test_value_3" ) ) - val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)))!! - val response = executeMonitor(monitor.monitor, monitor.id, false)!! - - assertEquals(monitor.monitor.name, response.monitorRunResult.monitorName) - @Suppress("UNCHECKED_CAST") - val searchResult = (response.monitorRunResult.inputResults.results).first() - @Suppress("UNCHECKED_CAST") - val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> - assertEquals("Incorrect search result", 2, buckets.size) + + val workflowId = workflowResponse.id + // 1. Doc level monitor should reduce the doc findings to 4 (3 - test_value_2, 4 - test_value_2, 5 - test_value_3, 6 - test_value_3) + // 2. Bucket level monitor will match the fetch the docs from current findings execution, although it contains rules for matching documents which has test_value_2 and test value_3 + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
+ assertNotNull(executeWorkflowResponse) + + for (monitorRunResults in executeWorkflowResponse.workflowRunResult) { + when (monitorRunResults.monitorName) { + // Verify first doc level monitor execution, alerts and findings + docLevelMonitorResponse.monitor.name -> { + assertEquals(1, monitorRunResults.inputResults.results.size) + val values = monitorRunResults.triggerResults.values + assertEquals(1, values.size) + @Suppress("UNCHECKED_CAST") + val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult + val triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] } + val expectedTriggeredDocIds = listOf("3", "4", "5", "6") + assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted()) + + assertAlerts(docLevelMonitorResponse, docLevelMonitorResponse.monitor.dataSources.alertsIndex, 4) + assertFindings(docLevelMonitorResponse.id, docLevelMonitorResponse.monitor.dataSources.findingsIndex, 4, 4, listOf("3", "4", "5", "6")) + } + // Verify second bucket level monitor execution, alerts and findings + bucketLevelMonitorResponse.monitor.name -> { + val searchResult = monitorRunResults.inputResults.results.first() + @Suppress("UNCHECKED_CAST") + val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> + assertEquals("Incorrect search result", 2, buckets.size) + + assertAlerts(bucketLevelMonitorResponse, bucketLevelMonitorResponse.monitor.dataSources.alertsIndex, 2) + assertFindings(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse.monitor.dataSources.findingsIndex, 1, 4, listOf("3", "4", "5", "6")) + } + // Verify third doc level monitor execution, alerts and findings + docLevelMonitorResponse1.monitor.name -> { + assertEquals(1, monitorRunResults.inputResults.results.size) + val values = monitorRunResults.triggerResults.values + assertEquals(1, values.size) + @Suppress("UNCHECKED_CAST") + val docLevelTrigger = values.iterator().next() as DocumentLevelTriggerRunResult + val 
triggeredDocIds = docLevelTrigger.triggeredDocs.map { it.split("|")[0] } + val expectedTriggeredDocIds = listOf("5", "6") + assertEquals(expectedTriggeredDocIds, triggeredDocIds.sorted()) + + assertAlerts(docLevelMonitorResponse1, docLevelMonitorResponse1.monitor.dataSources.alertsIndex, 2) + assertFindings(docLevelMonitorResponse1.id, docLevelMonitorResponse1.monitor.dataSources.findingsIndex, 2, 2, listOf("5", "6")) + } + // Verify fourth query level monitor execution + queryMonitorResponse.monitor.name -> { + assertEquals(1, monitorRunResults.inputResults.results.size) + val values = monitorRunResults.triggerResults.values + assertEquals(1, values.size) + @Suppress("UNCHECKED_CAST") + val totalHits = ((monitorRunResults.inputResults.results[0]["hits"] as Map)["total"] as Map) ["value"] + assertEquals(2, totalHits) + @Suppress("UNCHECKED_CAST") + val docIds = ((monitorRunResults.inputResults.results[0]["hits"] as Map)["hits"] as List>).map { it["_id"]!! } + assertEquals(listOf("5", "6"), docIds.sorted()) + } + } + } } private fun assertFindings( diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index f235c6e9e..8a61b7266 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -20,6 +20,8 @@ import org.opensearch.alerting.action.GetMonitorRequest import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue +import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions @@ -33,11 +35,14 @@ 
import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.Finding import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.Table +import org.opensearch.index.IndexService import org.opensearch.index.query.TermQueryBuilder import org.opensearch.index.reindex.ReindexPlugin import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.painless.PainlessPlugin import org.opensearch.plugins.Plugin import org.opensearch.rest.RestRequest +import org.opensearch.script.mustache.MustachePlugin import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchSingleNodeTestCase @@ -99,17 +104,36 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { /** A test index that can be used across tests. Feel free to add new fields but don't remove any. */ protected fun createTestIndex() { + val mapping = XContentFactory.jsonBuilder() + mapping.startObject() + .startObject("properties") + .startObject("test_strict_date_time") + .field("type", "date") + .field("format", "strict_date_time") + .endObject() + .startObject("test_field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + createIndex( - index, Settings.EMPTY, - """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" } - } - """.trimIndent() + index, Settings.EMPTY, mapping ) } + private fun createIndex( + index: String?, + settings: Settings?, + mappings: XContentBuilder?, + ): IndexService? 
{ + val createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings) + if (mappings != null) { + createIndexRequestBuilder.setMapping(mappings) + } + return this.createIndex(index, createIndexRequestBuilder) + } + protected fun indexDoc(index: String, id: String, doc: String) { client().prepareIndex(index).setId(id) .setSource(doc, XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get() @@ -164,7 +188,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { protected fun searchFindings( id: String, indices: String = AlertIndices.ALL_FINDING_INDEX_PATTERN, - refresh: Boolean = true + refresh: Boolean = true, ): List { if (refresh) refreshIndex(indices) @@ -199,7 +223,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { protected fun getMonitorResponse( monitorId: String, version: Long = 1L, - fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE + fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE, ) = client().execute( GetMonitorAction.INSTANCE, GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext) @@ -210,7 +234,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { ).get() override fun getPlugins(): List> { - return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java) + return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java, MustachePlugin::class.java, PainlessPlugin::class.java) } override fun resetNodeAfterTest(): Boolean { From a1e040846586081cdf8150bcd12599e418324171 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 27 Feb 2023 23:47:38 +0100 Subject: [PATCH 09/18] Added workflow execution run result and refactored ExecutionWorkflowResponse class Code adjusted according to comments Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 4 +- .../alerting/BucketLevelMonitorRunner.kt | 22 +---- 
.../alerting/DocumentLevelMonitorRunner.kt | 8 +- .../org/opensearch/alerting/InputService.kt | 18 ++-- .../opensearch/alerting/WorkflowService.kt | 87 ++++++++++++------- .../alerting/action/ExecuteWorkflowRequest.kt | 14 ++- .../action/ExecuteWorkflowResponse.kt | 28 +++++- .../alerting/model/WorkflowRunResult.kt | 41 +++++++++ .../resthandler/RestExecuteWorkflowAction.kt | 74 ---------------- .../TransportExecuteWorkflowAction.kt | 14 ++- .../workflow/CompositeWorkflowRunner.kt | 22 +++-- .../alerting/workflow/WorkflowRunner.kt | 4 +- .../workflow/WorkflowRunnerService.kt | 4 +- .../alerting/alerts/finding_mapping.json | 2 +- .../transport/WorkflowSingleNodeTestCase.kt | 21 +++-- 15 files changed, 200 insertions(+), 163 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt delete mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index b6c41c24c..86fef2706 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -28,7 +28,6 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestExecuteMonitorAction -import org.opensearch.alerting.resthandler.RestExecuteWorkflowAction import org.opensearch.alerting.resthandler.RestGetAlertsAction import org.opensearch.alerting.resthandler.RestGetDestinationsAction import org.opensearch.alerting.resthandler.RestGetEmailAccountAction @@ -172,8 +171,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestGetEmailGroupAction(), RestGetDestinationsAction(), 
RestGetAlertsAction(), - RestGetFindingsAction(), - RestExecuteWorkflowAction() + RestGetFindingsAction() ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 422ca24a0..4c8bdb20a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -40,9 +40,7 @@ import org.opensearch.commons.alerting.model.action.PerAlertActionScope import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import org.opensearch.commons.alerting.util.string import org.opensearch.index.query.BoolQueryBuilder -import org.opensearch.index.query.MatchQueryBuilder import org.opensearch.index.query.QueryBuilders -import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.rest.RestStatus import org.opensearch.script.Script import org.opensearch.script.ScriptType @@ -63,7 +61,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { periodStart: Instant, periodEnd: Instant, dryrun: Boolean, - workflowExecutionContext: WorkflowRunContext? + workflowRunContext: WorkflowRunContext? 
): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -123,7 +121,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { periodStart, periodEnd, monitorResult.inputResults, - workflowExecutionContext + workflowRunContext ) if (firstIteration) { firstPageOfInputResults = inputResults @@ -160,7 +158,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { periodStart, periodEnd, !dryrun && monitor.id != Monitor.NO_ID, - workflowExecutionContext + workflowRunContext ) } else { emptyList() @@ -396,18 +394,6 @@ object BucketLevelMonitorRunner : MonitorRunner() { val queryBuilder = if (input.query.query() == null) BoolQueryBuilder() else QueryBuilders.boolQuery().must(source.query()) queryBuilder.filter(QueryBuilders.termsQuery(fieldName, bucketValues)) - - if (workflowRunContext != null && !workflowRunContext.indexToDocIds.isNullOrEmpty()) { - workflowRunContext.indexToDocIds.forEach { entry -> - queryBuilder - .should() - .add( - BoolQueryBuilder() - .must(MatchQueryBuilder("_index", entry.key)) - .must(TermsQueryBuilder("_id", entry.value)) - ) - } - } sr.source().query(queryBuilder) } val searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(sr, it) } @@ -445,7 +431,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { index = it.key, timestamp = Instant.now(), docLevelQueries = listOf(), - workflowExecutionId = workflowExecutionId + executionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index fa7913670..30b2370d6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -127,9 +127,6 @@ object DocumentLevelMonitorRunner : MonitorRunner() { } } - // If monitor execution is triggered from a workflow - val indexToRelatedDocIdsMap = workflowRunContext?.indexToDocIds - indices.forEach { indexName -> // Prepare lastRunContext for each index val indexLastRunContext = lastRunContext.getOrPut(indexName) { @@ -159,6 +156,9 @@ object DocumentLevelMonitorRunner : MonitorRunner() { // Prepare DocumentExecutionContext for each index val docExecutionContext = DocumentExecutionContext(queries, indexLastRunContext, indexUpdatedRunContext) + // If monitor execution is triggered from a workflow + val indexToRelatedDocIdsMap = workflowRunContext?.indexToDocIds + val matchingDocs = getMatchingDocs( monitor, monitorCtx, @@ -338,7 +338,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() { index = docIndex[1], docLevelQueries = docLevelQueries, timestamp = Instant.now(), - workflowExecutionId = workflowExecutionId + executionId = workflowExecutionId ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index e52542ae9..dcc9353be 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -74,15 +74,16 @@ class InputService( "period_end" to periodEnd.toEpochMilli() ) + // Deep copying query before passing it to rewriteQuery since otherwise, the monitor.input is modified directly + // which causes a strange bug where the rewritten query persists on the Monitor across executions + val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) + // Rewrite query to consider the doc ids per given index if 
(chainedFindingExist(indexToDocIds)) { - val updatedSourceQuery = updateInputQueryWithFindingDocIds(input.query.query(), indexToDocIds!!) - input.query.query(updatedSourceQuery) + val updatedSourceQuery = updateInputQueryWithFindingDocIds(rewrittenQuery.query(), indexToDocIds!!) + rewrittenQuery.query(updatedSourceQuery) } - // Deep copying query before passing it to rewriteQuery since otherwise, the monitor.input is modified directly - // which causes a strange bug where the rewritten query persists on the Monitor across executions - val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) val searchSource = scriptService.compile( Script( ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, @@ -122,6 +123,13 @@ class InputService( } } + /** + * Extends the given query builder with query that filters the given indices with the given doc ids per index + * Used whenever we want to select the documents that were found in chained delegate execution of the current workflow run + * + * @param query Original bucket monitor query + * @param indexToDocIds Map of finding doc ids grouped by index + */ private fun updateInputQueryWithFindingDocIds( query: QueryBuilder, indexToDocIds: Map>, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt index 2d06039c3..b2b2ab95b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -33,43 +33,64 @@ import java.util.stream.Collectors private val log = LogManager.getLogger(WorkflowService::class.java) +/** + * Contains util methods used in workflow execution + */ class WorkflowService( val client: Client, val xContentRegistry: NamedXContentRegistry, ) { - - suspend fun getFindingDocIdsPerMonitorExecution(chainedMonitor: Monitor, workflowExecutionId: String): Map> { - // Search findings 
index per monitor and workflow execution id - val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, chainedMonitor.id)) - .filter(QueryBuilders.termQuery(Finding.WORKFLOW_EXECUTION_ID_FIELD, workflowExecutionId)) - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder() - .query(bqb) - .version(true) - .seqNoAndPrimaryTerm(true) - ) - .indices(chainedMonitor.dataSources.findingsIndex) - val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - - // Get the findings docs - val findings = mutableListOf() - for (hit in searchResponse.hits) { - val xcp = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val finding = Finding.parse(xcp) - findings.add(finding) - } - // Based on the findings get the document ids - val indexToRelatedDocIdsMap = mutableMapOf>() - for (finding in findings) { - indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds) + /** + * Returns finding doc ids per index for the given workflow execution + * Used for pre-filtering the dataset in the case of creating a workflow with chained findings + * + * @param chainedMonitor Monitor that is previously executed + * @param workflowExecutionId Execution id of the current workflow + */ + suspend fun getFindingDocIdsByExecutionId(chainedMonitor: Monitor, workflowExecutionId: String): Map> { + try { + // Search findings index per monitor and workflow execution id + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, chainedMonitor.id)) + .filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + ) + 
.indices(chainedMonitor.dataSources.findingsIndex) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + + // Get the findings docs + val findings = mutableListOf() + for (hit in searchResponse.hits) { + val xcp = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val finding = Finding.parse(xcp) + findings.add(finding) + } + // Based on the findings get the document ids + val indexToRelatedDocIdsMap = mutableMapOf>() + for (finding in findings) { + indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds) + } + return indexToRelatedDocIdsMap + } catch (t: Exception) { + log.error("Error getting finding doc ids: ${t.message}") + throw AlertingException.wrap(t) } - return indexToRelatedDocIdsMap } - suspend fun searchMonitors(monitors: List, size: Int, owner: String?): List { + /** + * Returns the list of monitors for the given ids + * Used in workflow execution in order to figure out the monitor type + * + * @param monitors List of monitor ids + * @param size Expected number of monitors + */ + suspend fun getMonitorsById(monitors: List, size: Int): List { val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors)) val searchRequest = SearchRequest() @@ -83,10 +104,10 @@ class WorkflowService( .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - return buildMonitors(searchResponse) + return parseMonitors(searchResponse) } - private fun buildMonitors(response: SearchResponse): List { + private fun parseMonitors(response: SearchResponse): List { if (response.isTimedOut) { log.error("Request for getting monitors timeout") throw OpenSearchException("Cannot determine that the 
${ScheduledJob.SCHEDULED_JOBS_INDEX} index is healthy") @@ -126,7 +147,7 @@ class WorkflowService( } else throw IllegalStateException("Delegate monitors don't exist $monitorId") // Search findings index per monitor and workflow execution id val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, monitor.id)) - .filter(QueryBuilders.termQuery(Finding.WORKFLOW_EXECUTION_ID_FIELD, workflowExecutionId)) + .filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) val searchRequest = SearchRequest() .source( SearchSourceBuilder() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt index 26258dd7f..7f2aea404 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt @@ -7,12 +7,16 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.ValidateActions import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.unit.TimeValue import org.opensearch.commons.alerting.model.Workflow import java.io.IOException +/** + * A class containing workflow details. + */ class ExecuteWorkflowRequest : ActionRequest { val dryrun: Boolean val requestEnd: TimeValue @@ -23,7 +27,7 @@ class ExecuteWorkflowRequest : ActionRequest { dryrun: Boolean, requestEnd: TimeValue, workflowId: String?, - workflow: Workflow? + workflow: Workflow?, ) : super() { this.dryrun = dryrun this.requestEnd = requestEnd @@ -42,7 +46,13 @@ class ExecuteWorkflowRequest : ActionRequest { ) override fun validate(): ActionRequestValidationException? { - return null + var validationException: ActionRequestValidationException? 
= null + if (workflowId == null || workflow == null) { + validationException = ValidateActions.addValidationError( + "Both workflow and workflow id are missing", validationException + ) + } + return validationException } @Throws(IOException::class) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt index 440fa2984..5a35ca8c5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt @@ -7,38 +7,62 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder import java.io.IOException +import java.time.Instant class ExecuteWorkflowResponse : ActionResponse, ToXContentObject { val workflowRunResult: List> + val executionStartTime: Instant + val executionEndTime: Instant + val status: WorkflowRunResult.WorkflowExecutionStatus - constructor(monitorRunResult: List>) : super() { + constructor( + monitorRunResult: List>, + executionStartTime: Instant, + executionEndTime: Instant, + status: WorkflowRunResult.WorkflowExecutionStatus + ) : super() { this.workflowRunResult = monitorRunResult + this.executionStartTime = executionStartTime + this.executionEndTime = executionEndTime + this.status = status } @Throws(IOException::class) constructor(sin: StreamInput) : this( - sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) } + sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) }, + sin.readInstant(), + 
sin.readInstant(), + sin.readEnum(WorkflowRunResult.WorkflowExecutionStatus::class.java) ) @Throws(IOException::class) override fun writeTo(out: StreamOutput) { out.writeList(workflowRunResult) + out.writeInstant(executionStartTime) + out.writeInstant(executionEndTime) + out.writeEnum(status) } @Throws(IOException::class) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject("workflow_run_result") builder.startArray() for (monitorResult in workflowRunResult) { monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) } builder.endArray() + builder.endObject() + builder.field("execution_start_time", executionStartTime) + builder.field("execution_end_time", executionEndTime) + builder.field("status", status) return builder } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt new file mode 100644 index 000000000..f531b8477 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.model + +import org.opensearch.common.io.stream.StreamInput +import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.common.io.stream.Writeable +import org.opensearch.common.xcontent.ToXContent +import org.opensearch.common.xcontent.XContentBuilder +import java.io.IOException + +data class WorkflowRunResult( + val workflowRunResult: List> +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) } + ) + + override fun writeTo(out: StreamOutput) { + out.writeList(workflowRunResult) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startArray() + for 
(monitorResult in workflowRunResult) { + monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) + } + builder.endArray() + return builder + } + + enum class WorkflowExecutionStatus(val value: String) { + SUCCESSFUL("successful"), + UNSUCCESSFUL("unsuccessful") + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt deleted file mode 100644 index ad2990d67..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.alerting.resthandler - -import org.apache.logging.log4j.LogManager -import org.opensearch.alerting.AlertingPlugin -import org.opensearch.alerting.action.ExecuteWorkflowAction -import org.opensearch.alerting.action.ExecuteWorkflowRequest -import org.opensearch.client.node.NodeClient -import org.opensearch.common.unit.TimeValue -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.alerting.model.Workflow -import org.opensearch.rest.BaseRestHandler -import org.opensearch.rest.RestHandler -import org.opensearch.rest.RestRequest -import org.opensearch.rest.action.RestToXContentListener -import java.time.Instant - -private val log = LogManager.getLogger(RestExecuteWorkflowAction::class.java) - -class RestExecuteWorkflowAction : BaseRestHandler() { - - override fun getName(): String = "execute_workflow_action" - - override fun routes(): List { - return listOf() - } - - override fun replacedRoutes(): MutableList { - return mutableListOf( - RestHandler.ReplacedRoute( - RestRequest.Method.POST, - "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_execute", - RestRequest.Method.POST, - 
"${AlertingPlugin.LEGACY_OPENDISTRO_WORKFLOW_BASE_URI}/{workflowID}/_execute" - ), - RestHandler.ReplacedRoute( - RestRequest.Method.POST, - "${AlertingPlugin.WORKFLOW_BASE_URI}/_execute", - RestRequest.Method.POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_WORKFLOW_BASE_URI}/_execute" - ) - ) - } - - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { - log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/_execute") - - return RestChannelConsumer { channel -> - val dryrun = request.paramAsBoolean("dryrun", false) - val requestEnd = request.paramAsTime("period_end", TimeValue(Instant.now().toEpochMilli())) - - if (request.hasParam("workflowID")) { - val workflowId = request.param("workflowID") - val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, workflowId, null) - client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) - } else { - val xcp = request.contentParser() - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val workflow = Workflow.parse(xcp, Workflow.NO_ID, Workflow.NO_VERSION) - val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, null, workflow) - client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) - } - } - } - - override fun responseParams(): Set { - return setOf("dryrun", "period_end", "workflowID") - } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt index 42367971f..830b1e80e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt @@ -19,6 +19,7 @@ import org.opensearch.alerting.MonitorRunnerService import 
org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.ExecuteWorkflowRequest import org.opensearch.alerting.action.ExecuteWorkflowResponse +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.workflow.WorkflowRunnerService import org.opensearch.client.Client @@ -54,17 +55,22 @@ class TransportExecuteWorkflowAction @Inject constructor( client.threadPool().threadContext.stashContext().use { val executeWorkflow = fun(workflow: Workflow) { - // Launch the coroutine with the clients threadContext. This is needed to preserve authentication information - // stored on the threadContext set by the security plugin when using the Alerting plugin with the Security plugin. - // runner.launch(ElasticThreadContextElement(client.threadPool().threadContext)) { runner.launch { + val startTime = Instant.now() val (periodStart, periodEnd) = workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) try { val workflowRunResult = WorkflowRunnerService.runJob(workflow, periodStart, periodEnd, execWorkflowRequest.dryrun) withContext(Dispatchers.IO) { - actionListener.onResponse(ExecuteWorkflowResponse(workflowRunResult)) + actionListener.onResponse( + ExecuteWorkflowResponse( + workflowRunResult.workflowRunResult, + startTime, + Instant.now(), + WorkflowRunResult.WorkflowExecutionStatus.SUCCESSFUL + ) + ) } } catch (e: Exception) { log.error("Unexpected error running workflow", e) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 4a08411d2..4bb175f36 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -5,19 +5,23 @@ package 
org.opensearch.alerting.workflow +import org.apache.logging.log4j.LogManager import org.opensearch.alerting.BucketLevelMonitorRunner import org.opensearch.alerting.DocumentLevelMonitorRunner import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.QueryLevelMonitorRunner import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.util.isDocLevelMonitor import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.alerting.util.isBucketLevelMonitor import java.time.Instant +import java.time.LocalDateTime import java.util.UUID object CompositeWorkflowRunner : WorkflowRunner() { + private val logger = LogManager.getLogger(javaClass) override suspend fun runWorkflow( workflow: Workflow, @@ -25,17 +29,18 @@ object CompositeWorkflowRunner : WorkflowRunner() { periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): List> { - val workflowExecutionId = UUID.randomUUID().toString() + ): WorkflowRunResult { + val workflowExecutionId = UUID.randomUUID().toString() + LocalDateTime.now() + logger.debug("Workflow ${workflow.id} in $workflowExecutionId execution is running") val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } // Fetch monitors by ids - val monitors = monitorCtx.workflowService!!.searchMonitors(delegates.map { it.monitorId }, delegates.size, workflow.owner) + val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) // Validate the monitors size if (delegates.size != monitors.size) { val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() - throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds") + throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") 
} val monitorsById = monitors.associateBy { it.id } @@ -43,14 +48,14 @@ object CompositeWorkflowRunner : WorkflowRunner() { for (delegate in delegates) { var delegateMonitor = monitorsById[delegate.monitorId] - ?: throw IllegalStateException("Delegate monitor not found ${delegate.monitorId}") + ?: throw IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") var indexToDocIds = mapOf>() if (delegate.chainedFindings != null) { val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] - ?: throw IllegalStateException("Chained finding monitor not found ${delegate.monitorId}") + ?: throw IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") - indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsPerMonitorExecution(chainedMonitor, workflowExecutionId) + indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, workflowExecutionId) } val workflowRunContext = WorkflowRunContext(delegate.chainedFindings?.monitorId, workflowExecutionId, indexToDocIds) @@ -85,6 +90,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { } resultList.add(runResult) } - return resultList + logger.debug("Workflow ${workflow.id} in $workflowExecutionId finished") + return WorkflowRunResult(resultList) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt index eb14e75fe..a7272a3dc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt @@ -6,7 +6,7 @@ package org.opensearch.alerting.workflow import org.opensearch.alerting.MonitorRunnerExecutionContext -import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowRunResult import 
org.opensearch.commons.alerting.model.Workflow import java.time.Instant @@ -17,5 +17,5 @@ abstract class WorkflowRunner { periodStart: Instant, periodEnd: Instant, dryRun: Boolean - ): List> + ): WorkflowRunResult } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt index 44a9fe9b7..bd9e4a0c2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunnerService.kt @@ -19,7 +19,7 @@ import org.opensearch.alerting.TriggerService import org.opensearch.alerting.WorkflowService import org.opensearch.alerting.alerts.AlertIndices import org.opensearch.alerting.core.JobRunner -import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.model.destination.DestinationContextFactory import org.opensearch.alerting.script.TriggerExecutionContext import org.opensearch.alerting.settings.AlertingSettings.Companion.ALERT_BACKOFF_COUNT @@ -197,7 +197,7 @@ object WorkflowRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompo } } - suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): List> { + suspend fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean): WorkflowRunResult { val workflow = job as Workflow return CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun) } diff --git a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json index fdeb0c2a1..bd916199b 100644 --- a/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json +++ b/alerting/src/main/resources/org/opensearch/alerting/alerts/finding_mapping.json @@ 
-52,7 +52,7 @@ "timestamp": { "type": "long" }, - "workflow_execution_id": { + "execution_id": { "type": "keyword" } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 6ccd4c62c..b79cad126 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -7,11 +7,11 @@ package org.opensearch.alerting.transport import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope import org.opensearch.action.support.WriteRequest -import org.opensearch.common.xcontent.XContentParser import org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.ExecuteWorkflowRequest import org.opensearch.alerting.action.ExecuteWorkflowResponse import org.opensearch.common.unit.TimeValue +import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.action.AlertingActions import org.opensearch.commons.alerting.action.DeleteWorkflowRequest @@ -36,7 +36,11 @@ import java.time.Instant @ThreadLeakScope(ThreadLeakScope.Scope.NONE) abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { - protected fun searchWorkflow(id: String, indices: String = ScheduledJob.SCHEDULED_JOBS_INDEX, refresh: Boolean = true): Workflow? { + protected fun searchWorkflow( + id: String, + indices: String = ScheduledJob.SCHEDULED_JOBS_INDEX, + refresh: Boolean = true, + ): Workflow? 
{ try { if (refresh) refreshIndex(indices) } catch (e: Exception) { @@ -61,7 +65,11 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { }.first() } - protected fun upsertWorkflow(workflow: Workflow, id: String = Workflow.NO_ID, method: RestRequest.Method = RestRequest.Method.POST): IndexWorkflowResponse? { + protected fun upsertWorkflow( + workflow: Workflow, + id: String = Workflow.NO_ID, + method: RestRequest.Method = RestRequest.Method.POST, + ): IndexWorkflowResponse? { val request = IndexWorkflowRequest( workflowId = id, seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -77,9 +85,12 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { protected fun getWorkflowById( id: String, version: Long = 1L, - fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE + fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE, ): GetWorkflowResponse { - return client().execute(AlertingActions.GET_WORKFLOW_ACTION_TYPE, GetWorkflowRequest(id, version, RestRequest.Method.GET, fetchSourceContext)).get() + return client().execute( + AlertingActions.GET_WORKFLOW_ACTION_TYPE, + GetWorkflowRequest(id, version, RestRequest.Method.GET, fetchSourceContext) + ).get() } protected fun deleteWorkflow(workflowId: String) { From a77d9bb9e0cad278aac952f65f31b54f458d13e0 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Wed, 1 Mar 2023 19:20:35 +0100 Subject: [PATCH 10/18] Added integration tests for workflow execution. 
PR comments addressed Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 2 + .../opensearch/alerting/WorkflowService.kt | 29 +++-- .../alerting/action/ExecuteWorkflowRequest.kt | 2 +- .../action/ExecuteWorkflowResponse.kt | 41 +----- .../alerting/model/WorkflowRunResult.kt | 30 +++-- .../resthandler/RestExecuteWorkflowAction.kt | 57 +++++++++ .../TransportExecuteWorkflowAction.kt | 7 +- .../transport/TransportGetWorkflowAction.kt | 6 - .../workflow/CompositeWorkflowRunner.kt | 119 ++++++++++-------- .../opensearch/alerting/WorkflowRunnerIT.kt | 75 ++++++++++- .../transport/AlertingSingleNodeTestCase.kt | 6 + .../transport/WorkflowSingleNodeTestCase.kt | 2 +- 12 files changed, 250 insertions(+), 126 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 86fef2706..5bf49b6e8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -28,6 +28,7 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestExecuteMonitorAction +import org.opensearch.alerting.resthandler.RestExecuteWorkflowAction import org.opensearch.alerting.resthandler.RestGetAlertsAction import org.opensearch.alerting.resthandler.RestGetDestinationsAction import org.opensearch.alerting.resthandler.RestGetEmailAccountAction @@ -163,6 +164,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestIndexMonitorAction(), RestSearchMonitorAction(settings, clusterService), RestExecuteMonitorAction(), + RestExecuteWorkflowAction(), 
RestAcknowledgeAlertAction(), RestScheduledJobStatsHandler("_alerting"), RestSearchEmailAccountAction(), diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt index b2b2ab95b..e4b34fcce 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -91,20 +91,25 @@ class WorkflowService( * @param size Expected number of monitors */ suspend fun getMonitorsById(monitors: List, size: Int): List { - val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors)) + try { + val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors)) - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder() - .query(bqb) - .version(true) - .seqNoAndPrimaryTerm(true) - .size(size) - ) - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + val searchRequest = SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) + .size(size) + ) + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - return parseMonitors(searchResponse) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + return parseMonitors(searchResponse) + } catch (e: Exception) { + log.error("Error getting monitors: ${e.message}") + throw AlertingException.wrap(e) + } } private fun parseMonitors(response: SearchResponse): List { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt index 7f2aea404..2d97bbdcc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt @@ -47,7 +47,7 @@ class ExecuteWorkflowRequest : ActionRequest { override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? = null - if (workflowId == null || workflow == null) { + if (workflowId == null && workflow == null) { validationException = ValidateActions.addValidationError( "Both workflow and workflow id are missing", validationException ) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt index 5a35ca8c5..a58eff9ae 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt @@ -6,7 +6,6 @@ package org.opensearch.alerting.action import org.opensearch.action.ActionResponse -import org.opensearch.alerting.model.MonitorRunResult import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.common.io.stream.StreamInput import org.opensearch.common.io.stream.StreamOutput @@ -14,55 +13,27 @@ import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.ToXContentObject import org.opensearch.common.xcontent.XContentBuilder import java.io.IOException -import java.time.Instant class ExecuteWorkflowResponse : ActionResponse, ToXContentObject { - - val workflowRunResult: List> - val executionStartTime: Instant - val executionEndTime: Instant - val status: WorkflowRunResult.WorkflowExecutionStatus - + val workflowRunResult: WorkflowRunResult constructor( - monitorRunResult: List>, - executionStartTime: Instant, - executionEndTime: Instant, - status: WorkflowRunResult.WorkflowExecutionStatus + workflowRunResult: WorkflowRunResult ) : super() { - this.workflowRunResult = monitorRunResult - this.executionStartTime = executionStartTime - 
this.executionEndTime = executionEndTime - this.status = status + this.workflowRunResult = workflowRunResult } @Throws(IOException::class) constructor(sin: StreamInput) : this( - sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) }, - sin.readInstant(), - sin.readInstant(), - sin.readEnum(WorkflowRunResult.WorkflowExecutionStatus::class.java) + WorkflowRunResult(sin) ) @Throws(IOException::class) override fun writeTo(out: StreamOutput) { - out.writeList(workflowRunResult) - out.writeInstant(executionStartTime) - out.writeInstant(executionEndTime) - out.writeEnum(status) + workflowRunResult.writeTo(out) } @Throws(IOException::class) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject("workflow_run_result") - builder.startArray() - for (monitorResult in workflowRunResult) { - monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) - } - builder.endArray() - builder.endObject() - builder.field("execution_start_time", executionStartTime) - builder.field("execution_end_time", executionEndTime) - builder.field("status", status) - return builder + return workflowRunResult.toXContent(builder, params) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt index f531b8477..596875344 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt @@ -11,31 +11,43 @@ import org.opensearch.common.io.stream.Writeable import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentBuilder import java.io.IOException +import java.lang.Exception +import java.time.Instant data class WorkflowRunResult( - val workflowRunResult: List> + val workflowRunResult: List> = mutableListOf(), + val executionStartTime: Instant, + val executionEndTime: Instant, + val 
executionId: String, + val error: Exception? = null ) : Writeable, ToXContent { @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") constructor(sin: StreamInput) : this( - sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) } + sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) }, + sin.readInstant(), + sin.readInstant(), + sin.readString(), + sin.readException() ) override fun writeTo(out: StreamOutput) { out.writeList(workflowRunResult) + out.writeInstant(executionStartTime) + out.writeInstant(executionEndTime) + out.writeString(executionId) + out.writeException(error) } override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startArray() + builder.startObject().startArray("workflow_run_result") for (monitorResult in workflowRunResult) { monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) } - builder.endArray() + builder.endArray().field("execution_start_time", executionStartTime) + .field("execution_end_time", executionEndTime) + .field("error", error?.message).endObject() return builder } - - enum class WorkflowExecutionStatus(val value: String) { - SUCCESSFUL("successful"), - UNSUCCESSFUL("unsuccessful") - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt new file mode 100644 index 000000000..bf93bc590 --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.resthandler + +import org.apache.logging.log4j.LogManager +import org.opensearch.alerting.AlertingPlugin +import org.opensearch.alerting.action.ExecuteWorkflowAction +import org.opensearch.alerting.action.ExecuteWorkflowRequest +import org.opensearch.client.node.NodeClient +import 
org.opensearch.common.unit.TimeValue +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.rest.BaseRestHandler +import org.opensearch.rest.RestHandler +import org.opensearch.rest.RestRequest +import org.opensearch.rest.action.RestToXContentListener +import java.time.Instant + +private val log = LogManager.getLogger(RestExecuteWorkflowAction::class.java) + +class RestExecuteWorkflowAction : BaseRestHandler() { + + override fun getName(): String = "execute_workflow_action" + + override fun routes(): List { + return listOf() + } + + override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/_execute") + + return RestChannelConsumer { channel -> + val dryrun = request.paramAsBoolean("dryrun", false) + val requestEnd = request.paramAsTime("period_end", TimeValue(Instant.now().toEpochMilli())) + + if (request.hasParam("workflowID")) { + val workflowId = request.param("workflowID") + val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, workflowId, null) + client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) + } else { + val xcp = request.contentParser() + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val workflow = Workflow.parse(xcp, Workflow.NO_ID, Workflow.NO_VERSION) + val execWorkflowRequest = ExecuteWorkflowRequest(dryrun, requestEnd, null, workflow) + client.execute(ExecuteWorkflowAction.INSTANCE, execWorkflowRequest, RestToXContentListener(channel)) + } + } + } + + override fun responseParams(): Set { + return setOf("dryrun", "period_end", "workflowID") + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt 
b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt index 830b1e80e..3a7fb56b6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt @@ -19,7 +19,6 @@ import org.opensearch.alerting.MonitorRunnerService import org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.ExecuteWorkflowRequest import org.opensearch.alerting.action.ExecuteWorkflowResponse -import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.workflow.WorkflowRunnerService import org.opensearch.client.Client @@ -56,7 +55,6 @@ class TransportExecuteWorkflowAction @Inject constructor( client.threadPool().threadContext.stashContext().use { val executeWorkflow = fun(workflow: Workflow) { runner.launch { - val startTime = Instant.now() val (periodStart, periodEnd) = workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) try { @@ -65,10 +63,7 @@ class TransportExecuteWorkflowAction @Inject constructor( withContext(Dispatchers.IO) { actionListener.onResponse( ExecuteWorkflowResponse( - workflowRunResult.workflowRunResult, - startTime, - Instant.now(), - WorkflowRunResult.WorkflowExecutionStatus.SUCCESSFUL + workflowRunResult ) ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt index f0802da4d..a816e2396 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt @@ -60,12 +60,6 @@ class TransportGetWorkflowAction @Inject constructor( return } - /* - * Remove security context before 
you call elasticsearch api's. By this time, permissions required - * to call this api are validated. - * Once system-indices [https://github.com/opendistro-for-elasticsearch/security/issues/666] is done, we - * might further improve this logic. Also change try to kotlin-use for auto-closable. - */ client.threadPool().threadContext.stashContext().use { client.get( getRequest, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 4bb175f36..9f6d9be69 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -12,8 +12,11 @@ import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.QueryLevelMonitorRunner import org.opensearch.alerting.model.MonitorRunResult import org.opensearch.alerting.model.WorkflowRunResult +import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.isDocLevelMonitor import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.alerting.util.isBucketLevelMonitor import java.time.Instant @@ -31,66 +34,76 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun: Boolean ): WorkflowRunResult { val workflowExecutionId = UUID.randomUUID().toString() + LocalDateTime.now() + var workflowResult = WorkflowRunResult(mutableListOf(), periodStart, periodEnd, workflowExecutionId) logger.debug("Workflow ${workflow.id} in $workflowExecutionId execution is running") + try { + val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + var monitors = 
monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + // Validate the monitors size + validateMonitorSize(delegates, monitors, workflow) - val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } - // Fetch monitors by ids - val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + val monitorsById = monitors.associateBy { it.id } + val resultList = mutableListOf>() - // Validate the monitors size - if (delegates.size != monitors.size) { - val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() - throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") - } + for (delegate in delegates) { + var indexToDocIds = mapOf>() + var delegateMonitor: Monitor + delegateMonitor = monitorsById[delegate.monitorId] + ?: throw IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") + if (delegate.chainedFindings != null) { + val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] + ?: throw IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") + indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, workflowExecutionId) + } - val monitorsById = monitors.associateBy { it.id } - val resultList = mutableListOf>() + val workflowRunContext = WorkflowRunContext(delegate.chainedFindings?.monitorId, workflowExecutionId, indexToDocIds) - for (delegate in delegates) { - var delegateMonitor = monitorsById[delegate.monitorId] - ?: throw IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") - - var indexToDocIds = mapOf>() - if (delegate.chainedFindings != null) { - val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] - ?: throw 
IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") - - indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, workflowExecutionId) + val runResult = if (delegateMonitor.isBucketLevelMonitor()) { + BucketLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else if (delegateMonitor.isDocLevelMonitor()) { + DocumentLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } else { + QueryLevelMonitorRunner.runMonitor( + delegateMonitor, + monitorCtx, + periodStart, + periodEnd, + dryRun, + workflowRunContext + ) + } + resultList.add(runResult) } + logger.debug("Workflow ${workflow.id} in $workflowExecutionId finished") + return workflowResult.copy(workflowRunResult = resultList) + } catch (e: Exception) { + logger.error("Failed to execute workflow. Error: ${e.message}") + return workflowResult.copy(error = AlertingException.wrap(e)) + } + } - val workflowRunContext = WorkflowRunContext(delegate.chainedFindings?.monitorId, workflowExecutionId, indexToDocIds) - - val runResult = if (delegateMonitor.isBucketLevelMonitor()) { - BucketLevelMonitorRunner.runMonitor( - delegateMonitor, - monitorCtx, - periodStart, - periodEnd, - dryRun, - workflowRunContext - ) - } else if (delegateMonitor.isDocLevelMonitor()) { - DocumentLevelMonitorRunner.runMonitor( - delegateMonitor, - monitorCtx, - periodStart, - periodEnd, - dryRun, - workflowRunContext - ) - } else { - QueryLevelMonitorRunner.runMonitor( - delegateMonitor, - monitorCtx, - periodStart, - periodEnd, - dryRun, - workflowRunContext - ) - } - resultList.add(runResult) + private fun validateMonitorSize( + delegates: List, + monitors: List, + workflow: Workflow, + ) { + if (delegates.size != monitors.size) { + val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { 
it.id }.toSet()).joinToString() + throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") } - logger.debug("Workflow ${workflow.id} in $workflowExecutionId finished") - return WorkflowRunResult(resultList) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index bebc17c39..7fb363384 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -9,6 +9,7 @@ import org.junit.Assert import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.model.DocumentLevelTriggerRunResult import org.opensearch.alerting.transport.WorkflowSingleNodeTestCase +import org.opensearch.alerting.util.AlertingException import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest import org.opensearch.commons.alerting.action.AlertingActions import org.opensearch.commons.alerting.action.GetAlertsRequest @@ -20,13 +21,16 @@ import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.alerting.model.Table import org.opensearch.index.query.QueryBuilders +import org.opensearch.rest.RestStatus import org.opensearch.script.Script import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder import org.opensearch.search.builder.SearchSourceBuilder +import java.lang.Exception import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit +import java.util.concurrent.ExecutionException class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { @@ -105,7 +109,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val workflowId = workflowResponse.id val executeWorkflowResponse = 
executeWorkflow(workflowById, workflowId, false)!! - val monitorsRunResults = executeWorkflowResponse.workflowRunResult + val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult assertEquals(2, monitorsRunResults.size) assertEquals(monitor1.name, monitorsRunResults[0].monitorName) @@ -209,7 +213,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! assertNotNull(executeWorkflowResponse) - for (monitorRunResults in executeWorkflowResponse.workflowRunResult) { + for (monitorRunResults in executeWorkflowResponse.workflowRunResult.workflowRunResult) { if (bucketLevelMonitorResponse.monitor.name == monitorRunResults.monitorName) { val searchResult = monitorRunResults.inputResults.results.first() @Suppress("UNCHECKED_CAST") @@ -347,7 +351,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! assertNotNull(executeWorkflowResponse) - for (monitorRunResults in executeWorkflowResponse.workflowRunResult) { + for (monitorRunResults in executeWorkflowResponse.workflowRunResult.workflowRunResult) { when (monitorRunResults.monitorName) { // Verify first doc level monitor execution, alerts and findings docLevelMonitorResponse.monitor.name -> { @@ -403,6 +407,71 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { } } + fun `test execute workflow inout error`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ val workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + deleteIndex(index) + + val response = executeWorkflow(workflowById, workflowById.id, false)!! + + assertNotNull(response.workflowRunResult.error) + assertTrue(response.workflowRunResult.error is AlertingException) + assertEquals(RestStatus.NOT_FOUND, (response.workflowRunResult.error as AlertingException).status) + assertEquals("Configured indices are not found: [$index]", (response.workflowRunResult.error as AlertingException).message) + } + + fun `test execute workflow wrong workflow id`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + val monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + var exception: Exception? 
= null + val badWorkflowId = getWorkflowResponse.id + "bad" + try { + executeWorkflow(id = badWorkflowId) + } catch (ex: Exception) { + exception = ex + } + assertTrue(exception is ExecutionException) + assertTrue(exception!!.cause is AlertingException) + assertEquals(RestStatus.NOT_FOUND, (exception.cause as AlertingException).status) + assertEquals("Can't find workflow with id: $badWorkflowId", exception.cause!!.message) + } + private fun assertFindings( monitorId: String, customFindingsIndex: String, diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 8a61b7266..8f14e4306 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -6,6 +6,7 @@ package org.opensearch.alerting.transport import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope +import org.opensearch.action.admin.indices.delete.DeleteIndexRequest import org.opensearch.action.admin.indices.get.GetIndexRequestBuilder import org.opensearch.action.admin.indices.get.GetIndexResponse import org.opensearch.action.admin.indices.refresh.RefreshAction @@ -233,6 +234,11 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE) ).get() + protected fun deleteIndex(index: String) { + val response = client().admin().indices().delete(DeleteIndexRequest(index)).get() + assertTrue("Unable to delete index", response.isAcknowledged()) + } + override fun getPlugins(): List> { return listOf(AlertingPlugin::class.java, ReindexPlugin::class.java, MustachePlugin::class.java, PainlessPlugin::class.java) } diff --git 
a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index b79cad126..9bc65914e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -100,7 +100,7 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { ).get() } - protected fun executeWorkflow(workflow: Workflow, id: String, dryRun: Boolean = true): ExecuteWorkflowResponse? { + protected fun executeWorkflow(workflow: Workflow? = null, id: String? = null, dryRun: Boolean = true): ExecuteWorkflowResponse? { val request = ExecuteWorkflowRequest(dryRun, TimeValue(Instant.now().toEpochMilli()), id, workflow) return client().execute(ExecuteWorkflowAction.INSTANCE, request).get() } From b6f17a8bc07a31cc0d52cd7124e70bad08a124a2 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Thu, 2 Mar 2023 20:52:11 +0100 Subject: [PATCH 11/18] Code adjusted to comments. 
Wrapped exceptions when executing workflow Signed-off-by: Stevan Buzejic --- .../alerting/DocumentLevelMonitorRunner.kt | 2 +- .../org/opensearch/alerting/InputService.kt | 2 +- .../opensearch/alerting/util/AlertingUtils.kt | 2 ++ .../workflow/CompositeWorkflowRunner.kt | 15 +++++++++--- .../alerting/workflow/WorkflowRunContext.kt | 2 +- .../opensearch/alerting/WorkflowRunnerIT.kt | 24 +++++++++---------- .../transport/AlertingSingleNodeTestCase.kt | 4 ++-- 7 files changed, 31 insertions(+), 20 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 30b2370d6..9b14ec610 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -157,7 +157,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() { val docExecutionContext = DocumentExecutionContext(queries, indexLastRunContext, indexUpdatedRunContext) // If monitor execution is triggered from a workflow - val indexToRelatedDocIdsMap = workflowRunContext?.indexToDocIds + val indexToRelatedDocIdsMap = workflowRunContext?.matchingDocIdsPerIndex val matchingDocs = getMatchingDocs( monitor, diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index dcc9353be..073118dde 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -61,7 +61,7 @@ class InputService( val aggTriggerAfterKey: MutableMap = mutableMapOf() // If monitor execution is triggered from a workflow - val indexToDocIds = workflowRunContext?.indexToDocIds + val indexToDocIds = workflowRunContext?.matchingDocIdsPerIndex // TODO: If/when multiple input queries are supported for Bucket-Level Monitor execution, 
aggTriggerAfterKeys will // need to be updated to account for it diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index 086c1302c..6b4760147 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -54,6 +54,8 @@ fun Destination.isTestAction(): Boolean = this.type == DestinationType.TEST_ACTI fun Monitor.isDocLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR +fun Monitor.isQueryLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.QUERY_LEVEL_MONITOR + /** * Since buckets can have multi-value keys, this converts the bucket key values to a string that can be used * as the key for a HashMap to easily retrieve [AggregationResultBucket] based on the bucket key values. diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 9f6d9be69..3841d65bd 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -14,6 +14,7 @@ import org.opensearch.alerting.model.MonitorRunResult import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.isDocLevelMonitor +import org.opensearch.alerting.util.isQueryLevelMonitor import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.Delegate import org.opensearch.commons.alerting.model.Monitor @@ -49,10 +50,14 @@ object CompositeWorkflowRunner : WorkflowRunner() { var indexToDocIds = mapOf>() var delegateMonitor: Monitor delegateMonitor = monitorsById[delegate.monitorId] - ?: throw 
IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") + ?: throw AlertingException.wrap( + IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow ${workflow.id}") + ) if (delegate.chainedFindings != null) { val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] - ?: throw IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") + ?: throw AlertingException.wrap( + IllegalStateException("Chained finding monitor not found ${delegate.chainedFindings!!.monitorId} for the workflow ${workflow.id}") + ) indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, workflowExecutionId) } @@ -76,7 +81,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext ) - } else { + } else if (delegateMonitor.isQueryLevelMonitor()) { QueryLevelMonitorRunner.runMonitor( delegateMonitor, monitorCtx, @@ -85,6 +90,10 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext ) + } else { + throw AlertingException.wrap( + IllegalStateException("Unsupported monitor type") + ) } resultList.add(runResult) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt index b536b694b..b259624ae 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt @@ -8,5 +8,5 @@ package org.opensearch.alerting.workflow data class WorkflowRunContext( val chainedMonitorId: String?, val workflowExecutionId: String, - val indexToDocIds: Map> + val matchingDocIdsPerIndex: Map> ) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index 7fb363384..e7f4d807e 100644 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -35,7 +35,7 @@ import java.util.concurrent.ExecutionException class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { fun `test execute workflow with custom alerts and finding index with doc level delegates`() { - val docQuery1 = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3") + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customAlertsIndex1 = "custom_alerts_index" @@ -83,7 +83,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { "message" : "This is an error from IAD region", "source.ip.v6.v2" : 16644, "test_strict_date_time" : "$testTime", - "test_field" : "us-west-2" + "test_field_1" : "us-west-2" }""" indexDoc(index, "1", testDoc1) @@ -93,7 +93,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { "message" : "This is an error from IAD region", "source.ip.v6.v2" : 16645, "test_strict_date_time" : "$testTime", - "test_field" : "us-west-2" + "test_field_1" : "us-west-2" }""" indexDoc(index, "2", testDoc2) @@ -103,7 +103,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { "message" : "This is an error from IAD region", "source.ip.v6.v2" : 16645, "test_strict_date_time" : "$testTime", - "test_field" : "us-east-1" + "test_field_1" : "us-east-1" }""" indexDoc(index, "3", testDoc3) @@ -131,7 +131,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { .lte("{{period_end}}") .format("epoch_millis") val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") + TermsValuesSourceBuilder("test_field_1").field("test_field_1") ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query 
= SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) @@ -168,9 +168,9 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { ) )!! - val docQuery1 = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "1") - val docQuery2 = DocLevelQuery(query = "test_field:\"test_value_1\"", name = "2") - val docQuery3 = DocLevelQuery(query = "test_field:\"test_value_3\"", name = "3") + val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1") + val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_1\"", name = "2") + val docQuery3 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "3") val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2, docQuery3)) val docTrigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val docCustomAlertsIndex = "custom_alerts_index" @@ -239,8 +239,8 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { } fun `test execute workflow with custom alerts and finding index with bucket level and doc level delegates when doc level delegate is used in chained finding`() { - val docQuery1 = DocLevelQuery(query = "test_field:\"test_value_2\"", name = "1") - val docQuery2 = DocLevelQuery(query = "test_field:\"test_value_3\"", name = "2") + val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1") + val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "2") var docLevelMonitor = randomDocumentLevelMonitor( inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), @@ -259,7 +259,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { .lte("{{period_end}}") .format("epoch_millis") val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") + TermsValuesSourceBuilder("test_field_1").field("test_field_1") ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = 
SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) @@ -294,7 +294,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { )!! var docLevelMonitor1 = randomDocumentLevelMonitor( - // Match the documents with test_field: test_value_3 + // Match the documents with test_field_1: test_value_3 inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), dataSources = DataSources( diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 8f14e4306..2b763a464 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -88,7 +88,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { val testDoc = """ { "test_strict_date_time": "$testTime", - "test_field": "$value", + "test_field_1": "$value", "number": "$i" } """.trimIndent() @@ -112,7 +112,7 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { .field("type", "date") .field("format", "strict_date_time") .endObject() - .startObject("test_field") + .startObject("test_field_1") .field("type", "keyword") .endObject() .endObject() From 8ded8b8ad3a958f63ab7e45fe01a6ed8f1c17989 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Wed, 8 Mar 2023 20:42:51 +0100 Subject: [PATCH 12/18] Added logic for deleting the workflow underlying monitors. 
Added validation if the query monitor is part of the workflow chain Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 4 +- .../TransportDeleteWorkflowAction.kt | 160 +++++++++++++++++- ...ion.kt => TransportIndexWorkflowAction.kt} | 37 +++- .../workflow/CompositeWorkflowRunner.kt | 2 +- .../opensearch/alerting/WorkflowMonitorIT.kt | 117 ++++++++++++- .../transport/WorkflowSingleNodeTestCase.kt | 4 +- .../resources/mappings/scheduled-jobs.json | 16 +- 7 files changed, 309 insertions(+), 31 deletions(-) rename alerting/src/main/kotlin/org/opensearch/alerting/transport/{TransportIndexCompositeWorkflowAction.kt => TransportIndexWorkflowAction.kt} (94%) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 5bf49b6e8..6e7dd07bd 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -56,8 +56,8 @@ import org.opensearch.alerting.transport.TransportGetEmailGroupAction import org.opensearch.alerting.transport.TransportGetFindingsSearchAction import org.opensearch.alerting.transport.TransportGetMonitorAction import org.opensearch.alerting.transport.TransportGetWorkflowAction -import org.opensearch.alerting.transport.TransportIndexCompositeWorkflowAction import org.opensearch.alerting.transport.TransportIndexMonitorAction +import org.opensearch.alerting.transport.TransportIndexWorkflowAction import org.opensearch.alerting.transport.TransportSearchEmailAccountAction import org.opensearch.alerting.transport.TransportSearchEmailGroupAction import org.opensearch.alerting.transport.TransportSearchMonitorAction @@ -193,7 +193,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), 
ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexCompositeWorkflowAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexWorkflowAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ACTION_TYPE, TransportGetWorkflowAction::class.java), ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java), ActionPlugin.ActionHandler(ExecuteWorkflowAction.INSTANCE, TransportExecuteWorkflowAction::class.java) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt index f50042052..a1f01abc0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -10,6 +10,7 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.GlobalScope import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager +import org.apache.lucene.search.join.ScoreMode import org.opensearch.OpenSearchStatusException import org.opensearch.action.ActionListener import org.opensearch.action.ActionRequest @@ -17,31 +18,48 @@ import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse +import org.opensearch.action.search.SearchRequest +import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction 
-import org.opensearch.action.support.WriteRequest +import org.opensearch.action.support.WriteRequest.RefreshPolicy import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings import org.opensearch.alerting.util.AlertingException import org.opensearch.client.Client +import org.opensearch.client.node.NodeClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.AlertingPluginInterface import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse import org.opensearch.commons.alerting.action.DeleteWorkflowRequest import org.opensearch.commons.alerting.action.DeleteWorkflowResponse +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.Schedule import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.WorkflowInput +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.alerting.util.instant import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject import org.opensearch.index.IndexNotFoundException +import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestStatus +import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService +import java.time.Instant +import java.util.Locale private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) @@ -69,14 +87,14 @@ class TransportDeleteWorkflowAction @Inject constructor( val user = readUserFromThreadContext(client) val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) if (!validateUserBackendRoles(user, actionListener)) { return } GlobalScope.launch(Dispatchers.IO + CoroutineName("DeleteWorkflowAction")) { - DeleteWorkflowHandler(client, actionListener, deleteRequest, user, transformedRequest.workflowId).resolveUserAndStart() + DeleteWorkflowHandler(client, actionListener, deleteRequest, transformedRequest.deleteUnderlyingMonitors, user, transformedRequest.workflowId).resolveUserAndStart() } } @@ -84,6 +102,7 @@ class TransportDeleteWorkflowAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val deleteRequest: DeleteRequest, + private val deleteUnderlyingMonitors: Boolean?, private val user: User?, private val workflowId: String ) { @@ -105,6 +124,15 @@ class TransportDeleteWorkflowAction @Inject constructor( val deleteResponse = deleteWorkflow(workflow) // TODO - uncomment once the workflow metadata is added // deleteMetadata(workflow) + if (deleteUnderlyingMonitors == true) { + val underlyingMonitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds() + val monitorIdsToBeDeleted = monitorsAreNotInDifferentWorkflows(workflowId, underlyingMonitorIds) + + // Delete the monitor ids + if (!monitorIdsToBeDeleted.isNullOrEmpty()) { + deleteMonitors(monitorIdsToBeDeleted, RefreshPolicy.IMMEDIATE) + } + } actionListener.onResponse(DeleteWorkflowResponse(deleteResponse.id, deleteResponse.version)) } else { actionListener.onFailure( @@ -129,6 +157,132 @@ class 
TransportDeleteWorkflowAction @Inject constructor( } } + private suspend fun deleteMonitors(monitorIds: List, refreshPolicy: RefreshPolicy) { + if (monitorIds.isNullOrEmpty()) + return + + for (monitorId in monitorIds) { + val deleteRequest = DeleteMonitorRequest(monitorId, refreshPolicy) + val searchResponse: DeleteMonitorResponse = client.suspendUntil { + AlertingPluginInterface.deleteMonitor(this as NodeClient, deleteRequest, it) + } + } + } + + private suspend fun monitorsAreNotInDifferentWorkflows(workflowIdToBeDeleted: String, monitorIds: List): List { + val queryBuilder = QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_id", workflowIdToBeDeleted)).filter( + QueryBuilders.nestedQuery( + Workflow.WORKFLOW_DELEGATE_PATH, + QueryBuilders.boolQuery().must( + QueryBuilders.termsQuery( + Workflow.WORKFLOW_MONITOR_PATH, + monitorIds + ) + ), + ScoreMode.None + ) + ) + + val searchRequest = SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder).fetchSource(true)) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + + val workflows = searchResponse.hits.hits.map { hit -> + val xcp = XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, XContentType.JSON + ).also { it.nextToken() } + lateinit var workflow: Workflow + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow" -> workflow = Workflow.parse(xcp) + } + } + workflow.copy(id = hit.id, version = hit.version) + } + val workflowMonitors = workflows.filter { it.id != workflowIdToBeDeleted }.flatMap { (it.inputs[0] as CompositeInput).getMonitorIds() }.distinct() + + return monitorIds.minus(workflowMonitors.toSet()) + } + + fun parse(xcp: XContentParser, id: String = Workflow.NO_ID, version: Long = Workflow.NO_VERSION): Workflow { + var name: String? 
= null + var workflowType: String = Workflow.WorkflowType.COMPOSITE.toString() + var user: User? = null + var schedule: Schedule? = null + var lastUpdateTime: Instant? = null + var enabledTime: Instant? = null + var enabled = true + var schemaVersion = IndexUtils.NO_SCHEMA_VERSION + val inputs: MutableList = mutableListOf() + var owner = "alerting" + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + Workflow.SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + Workflow.NAME_FIELD -> name = xcp.text() + Workflow.WORKFLOW_TYPE_FIELD -> { + workflowType = xcp.text() + val allowedTypes = Workflow.WorkflowType.values().map { it.value } + if (!allowedTypes.contains(workflowType)) { + throw IllegalStateException("Workflow type should be one of $allowedTypes") + } + } + Workflow.USER_FIELD -> { + user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + } + Workflow.ENABLED_FIELD -> enabled = xcp.booleanValue() + Workflow.SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) + Workflow.INPUTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val input = WorkflowInput.parse(xcp) + inputs.add(input) + } + } + Workflow.ENABLED_TIME_FIELD -> enabledTime = xcp.instant() + Workflow.LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() + Workflow.OWNER_FIELD -> { + owner = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) owner else xcp.text() + } + else -> { + xcp.skipChildren() + } + } + } + + if (enabled && enabledTime == null) { + enabledTime = Instant.now() + } else if (!enabled) { + enabledTime = null + } + return Workflow( + id, + version, + requireNotNull(name) { "Workflow name is null" }, + enabled, + 
requireNotNull(schedule) { "Workflow schedule is null" }, + lastUpdateTime ?: Instant.now(), + enabledTime, + Workflow.WorkflowType.valueOf(workflowType.uppercase(Locale.ROOT)), + user, + schemaVersion, + inputs.toList(), + owner + ) + } + private suspend fun getWorkflow(): Workflow { val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt similarity index 94% rename from alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt rename to alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt index 7ce4f48ee..a4fa9ce76 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexCompositeWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt @@ -33,6 +33,7 @@ import org.opensearch.alerting.settings.DestinationSettings.Companion.ALLOW_LIST import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.DocLevelMonitorQueries import org.opensearch.alerting.util.IndexUtils +import org.opensearch.alerting.util.isQueryLevelMonitor import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject @@ -55,6 +56,7 @@ import org.opensearch.commons.alerting.model.ScheduledJob.Companion.SCHEDULED_JO import org.opensearch.commons.alerting.model.Workflow import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject +import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.QueryBuilders import org.opensearch.rest.RestRequest import org.opensearch.rest.RestStatus @@ -63,10 +65,10 @@ import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService import java.util.stream.Collectors -private val log = LogManager.getLogger(TransportIndexCompositeWorkflowAction::class.java) +private val log = LogManager.getLogger(TransportIndexWorkflowAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportIndexCompositeWorkflowAction @Inject constructor( +class TransportIndexWorkflowAction @Inject constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, @@ -169,7 +171,16 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( try { validateRequest(request) } catch (e: Exception) { - actionListener.onFailure(e) + if (e is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Monitors not found", + RestStatus.NOT_FOUND + ) + ) + } else { + actionListener.onFailure(e) + } return@launch } @@ -480,7 +491,7 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( validateChainedFindings(compositeInput.sequence.delegates) val delegateMonitors = getDelegateMonitors(monitorIds) validateDelegateMonitorsExist(monitorIds, delegateMonitors) - // todo: validate that user has roles to reference delegate monitors + validateChainedFindingsMonitors(compositeInput.sequence.delegates, delegateMonitors) } private fun validateChainedFindings(delegates: List) { @@ -494,12 +505,28 @@ class TransportIndexCompositeWorkflowAction @Inject constructor( ) ) } - if (it.order <= monitorIdOrderMap[it.chainedFindings!!.monitorId]!!) + if (it.order <= monitorIdOrderMap[it.chainedFindings!!.monitorId]!!) 
{ throw AlertingException.wrap( IllegalArgumentException( "Chained Findings Monitor ${it.chainedFindings!!.monitorId} should be executed before monitor ${it.monitorId}" ) ) + } + } + } + } + + private fun validateChainedFindingsMonitors(delegates: List, monitorDelegates: List) { + val monitorsById = monitorDelegates.associateBy { it.id } + delegates.forEach { + if (it.chainedFindings != null) { + val chainedFindingMonitor = monitorsById[it.chainedFindings!!.monitorId] ?: throw AlertingException.wrap( + IllegalArgumentException("Chained finding monitor doesn't exist") + ) + + if (chainedFindingMonitor.isQueryLevelMonitor()) { + throw AlertingException.wrap(IllegalArgumentException("Query level monitor can't be part of chained findings")) + } } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 3841d65bd..6a15364b0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -34,7 +34,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { periodEnd: Instant, dryRun: Boolean ): WorkflowRunResult { - val workflowExecutionId = UUID.randomUUID().toString() + LocalDateTime.now() + val workflowExecutionId = workflow.id.plus(LocalDateTime.now()).plus(UUID.randomUUID().toString()) var workflowResult = WorkflowRunResult(mutableListOf(), periodStart, periodEnd, workflowExecutionId) logger.debug("Workflow ${workflow.id} in $workflowExecutionId execution is running") try { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 56aab4537..3b7fde74c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt 
@@ -353,7 +353,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } - fun `test delete workflow`() { + fun `test delete workflow underlying monitor deleted`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) @@ -376,7 +376,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(getWorkflowResponse) assertEquals(workflowId, getWorkflowResponse.id) - deleteWorkflow(workflowId) + deleteWorkflow(workflowId, true) // Verify that the workflow is deleted try { getWorkflowById(workflowId) @@ -388,9 +388,69 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) } } + // Verify that the monitor is deleted + try { + getMonitorResponse(monitorResponse.id) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetMonitor Action error ", + it.contains("Monitor not found") + ) + } + } } - fun `test delete monitor that is part of workflow sequence`() { + fun `test delete workflow underlying monitor not deleted`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + + var monitorResponse = createMonitor(monitor)!! + + val workflowRequest = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse = upsertWorkflow(workflowRequest)!! + val workflowId = workflowResponse.id + val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) + + assertNotNull(getWorkflowResponse) + assertEquals(workflowId, getWorkflowResponse.id) + + val workflowRequest2 = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id) + ) + val workflowResponse2 = upsertWorkflow(workflowRequest2)!! 
+ val workflowId2 = workflowResponse2.id + val getWorkflowResponse2 = getWorkflowById(id = workflowResponse2.id) + + assertNotNull(getWorkflowResponse2) + assertEquals(workflowId2, getWorkflowResponse2.id) + + deleteWorkflow(workflowId, true) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + val existingMonitor = getMonitorResponse(monitorResponse.id) + assertNotNull(existingMonitor) + } + + fun `test trying to delete monitor that is part of workflow sequence`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) @@ -407,7 +467,6 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { monitorIds = listOf(monitorResponse.id) ) - (workflowRequest.inputs.get(0) as CompositeInput).sequence.delegates.get(0).monitorId val workflowResponse = upsertWorkflow(workflowRequest)!! val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -718,6 +777,56 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } + fun `test create workflow query monitor chained findings monitor failure`() { + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + + val docMonitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger) + ) + val docMonitorResponse = createMonitor(docMonitor)!! + + val queryMonitor = randomQueryLevelMonitor() + val queryMonitorResponse = createMonitor(queryMonitor)!! 
+ + var workflow = randomWorkflowMonitor( + monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id) + ) + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Query level monitor can't be part of chained findings") + ) + } + } + } + + fun `test create workflow when monitor index not initialized failure`() { + val delegates = listOf( + Delegate(1, "monitor-1") + ) + val workflow = randomWorkflowMonitorWithDelegates( + delegates = delegates + ) + + try { + upsertWorkflow(workflow) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning IndexWorkflow Action error ", + it.contains("Monitors not found") + ) + } + } + } + fun `test update workflow chained findings monitor not in sequence failure`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 9bc65914e..23505d69b 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -93,10 +93,10 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { ).get() } - protected fun deleteWorkflow(workflowId: String) { + protected fun deleteWorkflow(workflowId: String, deleteUnderlyingMonitors: Boolean? 
= null) { client().execute( AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, - DeleteWorkflowRequest(workflowId, WriteRequest.RefreshPolicy.IMMEDIATE) + DeleteWorkflowRequest(workflowId, deleteUnderlyingMonitors, WriteRequest.RefreshPolicy.IMMEDIATE) ).get() } diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json index 3a94f86ac..c98fb9bfc 100644 --- a/core/src/main/resources/mappings/scheduled-jobs.json +++ b/core/src/main/resources/mappings/scheduled-jobs.json @@ -416,24 +416,12 @@ "type": "integer" }, "monitor_id": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } + "type": "keyword" }, "chained_findings": { "properties": { "monitor_id": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } + "type": "keyword" } } } From a593d386d6869ec81a87a39b673c47fcee7b64aa Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Thu, 9 Mar 2023 18:47:35 +0100 Subject: [PATCH 13/18] Added workflow metadata Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/AlertingPlugin.kt | 1 - .../alerting/DocumentLevelMonitorRunner.kt | 2 +- .../org/opensearch/alerting/MonitorRunner.kt | 8 +- .../alerting/model/AlertingConfigAccessor.kt | 22 ++++ .../alerting/model/WorkflowMetadata.kt | 102 ++++++++++++++++++ .../alerting/model/WorkflowRunResult.kt | 2 +- .../model/workflow/WorkflowRunResult.kt | 3 - .../opensearch/alerting/util/AlertingUtils.kt | 11 ++ .../workflow/CompositeWorkflowRunner.kt | 44 ++++++-- .../alerting/workflow/WorkflowRunContext.kt | 1 + .../opensearch/alerting/WorkflowRunnerIT.kt | 2 +- .../resources/mappings/scheduled-jobs.json | 39 +++++++ 12 files changed, 221 insertions(+), 16 deletions(-) create mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt delete mode 100644 alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 6e7dd07bd..ff52d69e8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -128,7 +128,6 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R @JvmField val WORKFLOW_BASE_URI = "/_plugins/_alerting/workflows" @JvmField val DESTINATION_BASE_URI = "/_plugins/_alerting/destinations" @JvmField val LEGACY_OPENDISTRO_MONITOR_BASE_URI = "/_opendistro/_alerting/monitors" - @JvmField val LEGACY_OPENDISTRO_WORKFLOW_BASE_URI = "/_opendistro/_alerting/workflows" @JvmField val LEGACY_OPENDISTRO_DESTINATION_BASE_URI = "/_opendistro/_alerting/destinations" @JvmField val EMAIL_ACCOUNT_BASE_URI = "$DESTINATION_BASE_URI/email_accounts" @JvmField val EMAIL_GROUP_BASE_URI = "$DESTINATION_BASE_URI/email_groups" diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 9b14ec610..4a5b768b6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -100,7 +100,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() { var monitorMetadata = getMonitorMetadata(monitorCtx.client!!, monitorCtx.xContentRegistry!!, "${monitor.id}-metadata") if (monitorMetadata == null) { - monitorMetadata = createMonitorMetadata(monitor.id) + monitorMetadata = createMonitorMetadata(monitor.id, workflowRunContext?.workflowId) } val isTempMonitor = dryrun || monitor.id == Monitor.NO_ID diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt index f234a6eb0..432b34a40 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt @@ -183,7 +183,11 @@ abstract class MonitorRunner { return NotificationActionConfigs(destination, channel) } - protected fun createMonitorMetadata(monitorId: String): MonitorMetadata { - return MonitorMetadata("$monitorId-metadata", monitorId, emptyList(), emptyMap()) + protected fun createMonitorMetadata(monitorId: String, workflowId: String? = null): MonitorMetadata { + return if (workflowId.isNullOrEmpty()) { + MonitorMetadata("$monitorId-metadata", monitorId, emptyList(), emptyMap()) + } else { + MonitorMetadata("$monitorId-$workflowId-metadata", monitorId, emptyList(), emptyMap()) + } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt index 7a96d2a44..654ee7d00 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt @@ -51,6 +51,28 @@ class AlertingConfigAccessor { } } + suspend fun getWorkflowMetadata(client: Client, xContentRegistry: NamedXContentRegistry, metadataId: String): WorkflowMetadata? 
{ + return try { + val jobSource = getAlertingConfigDocumentSource(client, "Workflow Metadata", metadataId) + withContext(Dispatchers.IO) { + val xcp = XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + jobSource, XContentType.JSON + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + WorkflowMetadata.parse(xcp) + } + } catch (e: IllegalStateException) { + if (e.message?.equals("Workflow Metadata document with id $metadataId not found or source is empty") == true) { + return null + } else throw e + } catch (e: IndexNotFoundException) { + if (e.message?.equals("no such index [.opendistro-alerting-config]") == true) { + return null + } else throw e + } + } + suspend fun getEmailAccountInfo(client: Client, xContentRegistry: NamedXContentRegistry, emailAccountId: String): EmailAccount { val source = getAlertingConfigDocumentSource(client, "Email account", emailAccountId) return withContext(Dispatchers.IO) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt new file mode 100644 index 000000000..eabe7470d --- /dev/null +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt @@ -0,0 +1,102 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.alerting.model + +import org.opensearch.common.io.stream.StreamInput +import org.opensearch.common.io.stream.StreamOutput +import org.opensearch.common.io.stream.Writeable +import org.opensearch.common.xcontent.ToXContent +import org.opensearch.common.xcontent.XContentBuilder +import org.opensearch.common.xcontent.XContentParser +import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.commons.alerting.util.instant +import java.io.IOException +import java.time.Instant + +data class WorkflowMetadata( + val id: String, + val 
workflowId: String, + val monitorIds: List, + val latestRunTime: Instant, + val latestExecutionId: String +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + workflowId = sin.readString(), + monitorIds = sin.readStringList(), + latestRunTime = sin.readInstant(), + latestExecutionId = sin.readString() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(workflowId) + out.writeStringCollection(monitorIds) + out.writeInstant(latestRunTime) + out.writeString(latestExecutionId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(WORKFLOW_ID_FIELD, workflowId) + .field(MONITOR_IDS_FIELD, monitorIds) + .field(LATEST_RUN_TIME, latestRunTime) + .field(LATEST_EXECUTION_ID, latestExecutionId) + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "workflow_metadata" + const val WORKFLOW_ID_FIELD = "workflow_id" + const val MONITOR_IDS_FIELD = "monitor_ids" + const val LATEST_RUN_TIME = "latest_run_time" + const val LATEST_EXECUTION_ID = "latest_execution_id" + + @JvmStatic @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser): WorkflowMetadata { + lateinit var workflowId: String + var monitorIds = mutableListOf() + lateinit var latestRunTime: Instant + lateinit var latestExecutionId: String + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + WORKFLOW_ID_FIELD -> workflowId = xcp.text() + MONITOR_IDS_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, 
xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + monitorIds.add(xcp.text()) + } + } + LATEST_RUN_TIME -> latestRunTime = xcp.instant()!! + LATEST_EXECUTION_ID -> latestExecutionId = xcp.text() + } + } + return WorkflowMetadata( + "$workflowId-metadata", + workflowId = workflowId, + monitorIds = monitorIds, + latestRunTime = latestRunTime, + latestExecutionId = latestExecutionId + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowMetadata { + return WorkflowMetadata(sin) + } + } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt index 596875344..762b097f1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowRunResult.kt @@ -17,7 +17,7 @@ import java.time.Instant data class WorkflowRunResult( val workflowRunResult: List> = mutableListOf(), val executionStartTime: Instant, - val executionEndTime: Instant, + val executionEndTime: Instant? = null, val executionId: String, val error: Exception? 
= null ) : Writeable, ToXContent { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt deleted file mode 100644 index cc6b61745..000000000 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/workflow/WorkflowRunResult.kt +++ /dev/null @@ -1,3 +0,0 @@ -package org.opensearch.alerting.model.workflow - -data class WorkflowRunResult(private val someArg: String) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index 6b4760147..e3c3299ca 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -11,6 +11,7 @@ import org.opensearch.action.index.IndexResponse import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.model.BucketLevelTriggerRunResult import org.opensearch.alerting.model.MonitorMetadata +import org.opensearch.alerting.model.WorkflowMetadata import org.opensearch.alerting.model.destination.Destination import org.opensearch.alerting.opensearchapi.suspendUntil import org.opensearch.alerting.settings.AlertingSettings @@ -134,3 +135,13 @@ suspend fun updateMonitorMetadata(client: Client, settings: Settings, monitorMet return client.suspendUntil { client.index(indexRequest, it) } } + +suspend fun updateWorkflowMetadata(client: Client, settings: Settings, workflowMetadata: WorkflowMetadata): IndexResponse { + val indexRequest = IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(workflowMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .id(workflowMetadata.id) + .timeout(AlertingSettings.INDEX_TIMEOUT.get(settings)) + + return client.suspendUntil { client.index(indexRequest, 
it) } +} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 6a15364b0..7d2eb58a3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -10,11 +10,14 @@ import org.opensearch.alerting.BucketLevelMonitorRunner import org.opensearch.alerting.DocumentLevelMonitorRunner import org.opensearch.alerting.MonitorRunnerExecutionContext import org.opensearch.alerting.QueryLevelMonitorRunner +import org.opensearch.alerting.model.AlertingConfigAccessor import org.opensearch.alerting.model.MonitorRunResult +import org.opensearch.alerting.model.WorkflowMetadata import org.opensearch.alerting.model.WorkflowRunResult import org.opensearch.alerting.util.AlertingException import org.opensearch.alerting.util.isDocLevelMonitor import org.opensearch.alerting.util.isQueryLevelMonitor +import org.opensearch.alerting.util.updateWorkflowMetadata import org.opensearch.commons.alerting.model.CompositeInput import org.opensearch.commons.alerting.model.Delegate import org.opensearch.commons.alerting.model.Monitor @@ -34,15 +37,28 @@ object CompositeWorkflowRunner : WorkflowRunner() { periodEnd: Instant, dryRun: Boolean ): WorkflowRunResult { - val workflowExecutionId = workflow.id.plus(LocalDateTime.now()).plus(UUID.randomUUID().toString()) - var workflowResult = WorkflowRunResult(mutableListOf(), periodStart, periodEnd, workflowExecutionId) - logger.debug("Workflow ${workflow.id} in $workflowExecutionId execution is running") + val workflowExecutionStartTime = Instant.now() + + val executionId = workflow.id.plus(LocalDateTime.now()).plus(UUID.randomUUID().toString()) + var workflowResult = WorkflowRunResult(mutableListOf(), workflowExecutionStartTime, null, executionId) + val isTempMonitor = dryRun || workflow.id == 
Workflow.NO_ID + + logger.debug("Workflow ${workflow.id} in $executionId execution is running") try { val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } var monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) // Validate the monitors size validateMonitorSize(delegates, monitors, workflow) + var workflowMetadata = AlertingConfigAccessor.getWorkflowMetadata( + monitorCtx.client!!, + monitorCtx.xContentRegistry!!, + "${workflow.id}-metadata" + ) + if (workflowMetadata == null) { + workflowMetadata = createWorkflowMetadata(workflow.id, delegates.map { it.monitorId }, executionId) + } + val monitorsById = monitors.associateBy { it.id } val resultList = mutableListOf>() @@ -58,10 +74,10 @@ object CompositeWorkflowRunner : WorkflowRunner() { ?: throw AlertingException.wrap( IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") ) - indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, workflowExecutionId) + indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, executionId) } - val workflowRunContext = WorkflowRunContext(delegate.chainedFindings?.monitorId, workflowExecutionId, indexToDocIds) + val workflowRunContext = WorkflowRunContext(workflow.id, delegate.chainedFindings?.monitorId, executionId, indexToDocIds) val runResult = if (delegateMonitor.isBucketLevelMonitor()) { BucketLevelMonitorRunner.runMonitor( @@ -97,8 +113,18 @@ object CompositeWorkflowRunner : WorkflowRunner() { } resultList.add(runResult) } - logger.debug("Workflow ${workflow.id} in $workflowExecutionId finished") - return workflowResult.copy(workflowRunResult = resultList) + + logger.debug("Workflow ${workflow.id} in $executionId finished") + // Update metadata only if the workflow is not temp + if (!isTempMonitor) { + updateWorkflowMetadata( + monitorCtx.client!!, + 
monitorCtx.settings!!, + workflowMetadata.copy(latestRunTime = workflowExecutionStartTime, latestExecutionId = executionId) + ) + } + + return workflowResult.copy(workflowRunResult = resultList, executionEndTime = Instant.now()) } catch (e: Exception) { logger.error("Failed to execute workflow. Error: ${e.message}") return workflowResult.copy(error = AlertingException.wrap(e)) @@ -115,4 +141,8 @@ object CompositeWorkflowRunner : WorkflowRunner() { throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") } } + + private fun createWorkflowMetadata(workflowId: String, monitors: List, executionId: String): WorkflowMetadata { + return WorkflowMetadata("$workflowId-metadata", workflowId, monitors, Instant.now(), executionId) + } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt index b259624ae..77fd07179 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt @@ -6,6 +6,7 @@ package org.opensearch.alerting.workflow data class WorkflowRunContext( + val workflowId: String, val chainedMonitorId: String?, val workflowExecutionId: String, val matchingDocIdsPerIndex: Map> diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index e7f4d807e..18ad37928 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -407,7 +407,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { } } - fun `test execute workflow inout error`() { + fun `test execute workflow input error`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = 
"source.ip.v6.v1:12345", name = "3")) ) diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json index c98fb9bfc..2babe37f4 100644 --- a/core/src/main/resources/mappings/scheduled-jobs.json +++ b/core/src/main/resources/mappings/scheduled-jobs.json @@ -820,6 +820,45 @@ "enabled": false } } + }, + "workflow_metadata" : { + "properties": { + "workflow_id": { + "type": "keyword" + }, + "monitor_ids": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "latest_run_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "latest_execution_id": { + "type": "keyword" + }, + "last_action_execution_times": { + "type": "nested", + "properties": { + "action_id": { + "type": "keyword" + }, + "execution_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + } + } + }, + "last_run_context": { + "type": "object", + "enabled": false + } + } } } } From 8e0d28d51514c692444618151de5dca142b84edf Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Thu, 9 Mar 2023 21:16:34 +0100 Subject: [PATCH 14/18] Added mappings for the workflow-metadata. Added integration tests for checking workflow metadata. 
Changed flow of workflow execution Signed-off-by: Stevan Buzejic --- .../org/opensearch/alerting/InputService.kt | 6 +- .../opensearch/alerting/WorkflowService.kt | 73 ----------- .../alerting/model/WorkflowMetadata.kt | 3 +- .../TransportDeleteWorkflowAction.kt | 116 ++++-------------- .../workflow/CompositeWorkflowRunner.kt | 103 +++++++++------- .../opensearch/alerting/WorkflowMonitorIT.kt | 74 ++++++++++- .../opensearch/alerting/WorkflowRunnerIT.kt | 73 ++++++++++- .../transport/WorkflowSingleNodeTestCase.kt | 34 ++++- .../resources/mappings/scheduled-jobs.json | 16 --- 9 files changed, 262 insertions(+), 236 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index 073118dde..bb631a74d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -61,7 +61,7 @@ class InputService( val aggTriggerAfterKey: MutableMap = mutableMapOf() // If monitor execution is triggered from a workflow - val indexToDocIds = workflowRunContext?.matchingDocIdsPerIndex + val matchingDocIdsPerIndex = workflowRunContext?.matchingDocIdsPerIndex // TODO: If/when multiple input queries are supported for Bucket-Level Monitor execution, aggTriggerAfterKeys will // need to be updated to account for it @@ -79,8 +79,8 @@ class InputService( val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) // Rewrite query to consider the doc ids per given index - if (chainedFindingExist(indexToDocIds)) { - val updatedSourceQuery = updateInputQueryWithFindingDocIds(rewrittenQuery.query(), indexToDocIds!!) + if (chainedFindingExist(matchingDocIdsPerIndex)) { + val updatedSourceQuery = updateInputQueryWithFindingDocIds(rewrittenQuery.query(), matchingDocIdsPerIndex!!) 
rewrittenQuery.query(updatedSourceQuery) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt index e4b34fcce..ae018c843 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -7,8 +7,6 @@ package org.opensearch.alerting import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchException -import org.opensearch.action.get.GetRequest -import org.opensearch.action.get.GetResponse import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.alerting.opensearchapi.suspendUntil @@ -17,19 +15,14 @@ import org.opensearch.client.Client import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentFactory -import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.model.Finding import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.model.ScheduledJob -import org.opensearch.index.query.BoolQueryBuilder -import org.opensearch.index.query.MatchQueryBuilder import org.opensearch.index.query.QueryBuilders -import org.opensearch.index.query.TermsQueryBuilder import org.opensearch.search.builder.SearchSourceBuilder -import java.util.stream.Collectors private val log = LogManager.getLogger(WorkflowService::class.java) @@ -134,70 +127,4 @@ class WorkflowService( } return monitors } - - suspend fun getDocIdsPerFindingIndex(monitorId: String, workflowExecutionId: String): Map> { - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) - - val getResponse: 
GetResponse = client.suspendUntil { - client.get(getRequest, it) - } - - val monitor = if (!getResponse.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - ScheduledJob.parse(xcp, getResponse.id, getResponse.version) as Monitor - } - } else throw IllegalStateException("Delegate monitors don't exist $monitorId") - // Search findings index per monitor and workflow execution id - val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Finding.MONITOR_ID_FIELD, monitor.id)) - .filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder() - .query(bqb) - .version(true) - .seqNoAndPrimaryTerm(true) - ) - .indices(monitor.dataSources.findingsIndex) - val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - - // Get the findings docs - val findings = mutableListOf() - for (hit in searchResponse.hits) { - val xcp = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val finding = Finding.parse(xcp) - findings.add(finding) - } - - val indexToRelatedDocIdsMap = mutableMapOf>() - - for (finding in findings) { - indexToRelatedDocIdsMap.getOrPut(finding.index) { mutableListOf() }.addAll(finding.relatedDocIds) - } - - val toTypedArray = indexToRelatedDocIdsMap.keys.stream().collect(Collectors.toList()).toTypedArray() - val searchFindings = SearchRequest().indices(*toTypedArray) - val queryBuilder = QueryBuilders.boolQuery() - indexToRelatedDocIdsMap.forEach { entry -> - queryBuilder - .should() - .add( - BoolQueryBuilder() - .must(MatchQueryBuilder("_index", entry.key)) - .must(TermsQueryBuilder("_id", entry.value)) - ) - } - 
searchFindings.source(SearchSourceBuilder().query(queryBuilder)) - val finalQueryResponse: SearchResponse = client.suspendUntil { client.search(searchFindings, it) } - - val indexDocIds = mutableMapOf>() - for (hit in finalQueryResponse.hits) { - indexDocIds.getOrPut(hit.index) { mutableListOf() }.add(hit.id) - } - return indexDocIds - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt index eabe7470d..c07bcfdb4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/WorkflowMetadata.kt @@ -13,6 +13,7 @@ import org.opensearch.common.xcontent.XContentBuilder import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField import java.io.IOException import java.time.Instant @@ -46,7 +47,7 @@ data class WorkflowMetadata( if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) builder.field(WORKFLOW_ID_FIELD, workflowId) .field(MONITOR_IDS_FIELD, monitorIds) - .field(LATEST_RUN_TIME, latestRunTime) + .optionalTimeField(LATEST_RUN_TIME, latestRunTime) .field(LATEST_EXECUTION_ID, latestExecutionId) if (params.paramAsBoolean("with_type", false)) builder.endObject() return builder.endObject() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt index a1f01abc0..5d6086a1d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -35,7 +35,6 @@ import 
org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.AlertingPluginInterface import org.opensearch.commons.alerting.action.AlertingActions @@ -44,12 +43,8 @@ import org.opensearch.commons.alerting.action.DeleteMonitorResponse import org.opensearch.commons.alerting.action.DeleteWorkflowRequest import org.opensearch.commons.alerting.action.DeleteWorkflowResponse import org.opensearch.commons.alerting.model.CompositeInput -import org.opensearch.commons.alerting.model.Schedule import org.opensearch.commons.alerting.model.ScheduledJob import org.opensearch.commons.alerting.model.Workflow -import org.opensearch.commons.alerting.model.WorkflowInput -import org.opensearch.commons.alerting.util.IndexUtils -import org.opensearch.commons.alerting.util.instant import org.opensearch.commons.authuser.User import org.opensearch.commons.utils.recreateObject import org.opensearch.index.IndexNotFoundException @@ -58,11 +53,13 @@ import org.opensearch.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService -import java.time.Instant -import java.util.Locale private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) +/** + * Transport class that deletes the workflow. 
+ * If the deleteDelegateMonitor flag is set to true, deletes the workflow delegates that are not part of another workflow + */ class TransportDeleteWorkflowAction @Inject constructor( transportService: TransportService, val client: Client, @@ -94,7 +91,14 @@ class TransportDeleteWorkflowAction @Inject constructor( } GlobalScope.launch(Dispatchers.IO + CoroutineName("DeleteWorkflowAction")) { - DeleteWorkflowHandler(client, actionListener, deleteRequest, transformedRequest.deleteUnderlyingMonitors, user, transformedRequest.workflowId).resolveUserAndStart() + DeleteWorkflowHandler( + client, + actionListener, + deleteRequest, + transformedRequest.deleteDelegateMonitors, + user, + transformedRequest.workflowId + ).resolveUserAndStart() } } @@ -102,7 +106,7 @@ class TransportDeleteWorkflowAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val deleteRequest: DeleteRequest, - private val deleteUnderlyingMonitors: Boolean?, + private val deleteDelegateMonitors: Boolean?, private val user: User?, private val workflowId: String ) { @@ -122,11 +126,10 @@ class TransportDeleteWorkflowAction @Inject constructor( if (canDelete) { val deleteResponse = deleteWorkflow(workflow) - // TODO - uncomment once the workflow metadata is added - // deleteMetadata(workflow) - if (deleteUnderlyingMonitors == true) { - val underlyingMonitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds() - val monitorIdsToBeDeleted = monitorsAreNotInDifferentWorkflows(workflowId, underlyingMonitorIds) + deleteMetadata(workflow) + if (deleteDelegateMonitors == true) { + val delegateMonitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds() + val monitorIdsToBeDeleted = getDeletableDelegates(workflowId, delegateMonitorIds) // Delete the monitor ids if (!monitorIdsToBeDeleted.isNullOrEmpty()) { @@ -169,7 +172,13 @@ class TransportDeleteWorkflowAction @Inject constructor( } } - private suspend fun 
monitorsAreNotInDifferentWorkflows(workflowIdToBeDeleted: String, monitorIds: List): List { + /** + * Returns lit of monitor ids belonging only to a given workflow + * @param workflowIdToBeDeleted Id of the workflow that should be deleted + * @param monitorIds List of delegate monitor ids (underlying monitor ids) + */ + private suspend fun getDeletableDelegates(workflowIdToBeDeleted: String, monitorIds: List): List { + // Retrieve monitors belonging to another workflows val queryBuilder = QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_id", workflowIdToBeDeleted)).filter( QueryBuilders.nestedQuery( Workflow.WORKFLOW_DELEGATE_PATH, @@ -204,85 +213,10 @@ class TransportDeleteWorkflowAction @Inject constructor( workflow.copy(id = hit.id, version = hit.version) } val workflowMonitors = workflows.filter { it.id != workflowIdToBeDeleted }.flatMap { (it.inputs[0] as CompositeInput).getMonitorIds() }.distinct() - + // Monitors that can be deleted -> all monitors - monitors belonging to another workflows return monitorIds.minus(workflowMonitors.toSet()) } - fun parse(xcp: XContentParser, id: String = Workflow.NO_ID, version: Long = Workflow.NO_VERSION): Workflow { - var name: String? = null - var workflowType: String = Workflow.WorkflowType.COMPOSITE.toString() - var user: User? = null - var schedule: Schedule? = null - var lastUpdateTime: Instant? = null - var enabledTime: Instant? 
= null - var enabled = true - var schemaVersion = IndexUtils.NO_SCHEMA_VERSION - val inputs: MutableList = mutableListOf() - var owner = "alerting" - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = xcp.currentName() - xcp.nextToken() - - when (fieldName) { - Workflow.SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() - Workflow.NAME_FIELD -> name = xcp.text() - Workflow.WORKFLOW_TYPE_FIELD -> { - workflowType = xcp.text() - val allowedTypes = Workflow.WorkflowType.values().map { it.value } - if (!allowedTypes.contains(workflowType)) { - throw IllegalStateException("Workflow type should be one of $allowedTypes") - } - } - Workflow.USER_FIELD -> { - user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) - } - Workflow.ENABLED_FIELD -> enabled = xcp.booleanValue() - Workflow.SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) - Workflow.INPUTS_FIELD -> { - XContentParserUtils.ensureExpectedToken( - XContentParser.Token.START_ARRAY, - xcp.currentToken(), - xcp - ) - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - val input = WorkflowInput.parse(xcp) - inputs.add(input) - } - } - Workflow.ENABLED_TIME_FIELD -> enabledTime = xcp.instant() - Workflow.LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() - Workflow.OWNER_FIELD -> { - owner = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) owner else xcp.text() - } - else -> { - xcp.skipChildren() - } - } - } - - if (enabled && enabledTime == null) { - enabledTime = Instant.now() - } else if (!enabled) { - enabledTime = null - } - return Workflow( - id, - version, - requireNotNull(name) { "Workflow name is null" }, - enabled, - requireNotNull(schedule) { "Workflow schedule is null" }, - lastUpdateTime ?: Instant.now(), - enabledTime, - Workflow.WorkflowType.valueOf(workflowType.uppercase(Locale.ROOT)), - user, - schemaVersion, 
- inputs.toList(), - owner - ) - } - private suspend fun getWorkflow(): Workflow { val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 7d2eb58a3..c183d3125 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -44,42 +44,57 @@ object CompositeWorkflowRunner : WorkflowRunner() { val isTempMonitor = dryRun || workflow.id == Workflow.NO_ID logger.debug("Workflow ${workflow.id} in $executionId execution is running") + val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + var monitors: List + try { - val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } - var monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) - // Validate the monitors size - validateMonitorSize(delegates, monitors, workflow) + monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + } catch (e: Exception) { + logger.error("Failed to execute workflow. 
Error: ${e.message}") + return workflowResult.copy(error = AlertingException.wrap(e)) + } + // Validate the monitors size + validateMonitorSize(delegates, monitors, workflow) - var workflowMetadata = AlertingConfigAccessor.getWorkflowMetadata( - monitorCtx.client!!, - monitorCtx.xContentRegistry!!, - "${workflow.id}-metadata" - ) - if (workflowMetadata == null) { - workflowMetadata = createWorkflowMetadata(workflow.id, delegates.map { it.monitorId }, executionId) - } + var workflowMetadata = AlertingConfigAccessor.getWorkflowMetadata( + monitorCtx.client!!, + monitorCtx.xContentRegistry!!, + "${workflow.id}-metadata" + ) + if (workflowMetadata == null) { + workflowMetadata = createWorkflowMetadata(workflow.id, delegates.map { it.monitorId }, executionId) + } - val monitorsById = monitors.associateBy { it.id } - val resultList = mutableListOf>() + val monitorsById = monitors.associateBy { it.id } + val resultList = mutableListOf>() + var lastErrorDelegateRun: Exception? = null - for (delegate in delegates) { - var indexToDocIds = mapOf>() - var delegateMonitor: Monitor - delegateMonitor = monitorsById[delegate.monitorId] + for (delegate in delegates) { + var indexToDocIds = mapOf>() + var delegateMonitor: Monitor + delegateMonitor = monitorsById[delegate.monitorId] + ?: throw AlertingException.wrap( + IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") + ) + if (delegate.chainedFindings != null) { + val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] ?: throw AlertingException.wrap( - IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") + IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") ) - if (delegate.chainedFindings != null) { - val chainedMonitor = monitorsById[delegate.chainedFindings!!.monitorId] - ?: throw AlertingException.wrap( - IllegalStateException("Chained finding monitor 
not found ${delegate.monitorId} for the workflow $workflow.id") - ) + + try { indexToDocIds = monitorCtx.workflowService!!.getFindingDocIdsByExecutionId(chainedMonitor, executionId) + } catch (e: Exception) { + logger.error("Failed to execute workflow. Error: ${e.message}") + return workflowResult.copy(error = AlertingException.wrap(e)) } + } - val workflowRunContext = WorkflowRunContext(workflow.id, delegate.chainedFindings?.monitorId, executionId, indexToDocIds) + val workflowRunContext = WorkflowRunContext(workflow.id, delegate.chainedFindings?.monitorId, executionId, indexToDocIds) - val runResult = if (delegateMonitor.isBucketLevelMonitor()) { + var delegateRunResult: MonitorRunResult<*>? + try { + delegateRunResult = if (delegateMonitor.isBucketLevelMonitor()) { BucketLevelMonitorRunner.runMonitor( delegateMonitor, monitorCtx, @@ -111,24 +126,24 @@ object CompositeWorkflowRunner : WorkflowRunner() { IllegalStateException("Unsupported monitor type") ) } - resultList.add(runResult) + } catch (ex: Exception) { + logger.error("Error executing workflow delegate. Error: ${ex.message}") + lastErrorDelegateRun = AlertingException.wrap(ex) + continue } - - logger.debug("Workflow ${workflow.id} in $executionId finished") - // Update metadata only if the workflow is not temp - if (!isTempMonitor) { - updateWorkflowMetadata( - monitorCtx.client!!, - monitorCtx.settings!!, - workflowMetadata.copy(latestRunTime = workflowExecutionStartTime, latestExecutionId = executionId) - ) - } - - return workflowResult.copy(workflowRunResult = resultList, executionEndTime = Instant.now()) - } catch (e: Exception) { - logger.error("Failed to execute workflow. 
Error: ${e.message}") - return workflowResult.copy(error = AlertingException.wrap(e)) + if (delegateRunResult != null) resultList.add(delegateRunResult) + } + logger.debug("Workflow ${workflow.id} in $executionId finished") + // Update metadata only if the workflow is not temp + if (!isTempMonitor) { + updateWorkflowMetadata( + monitorCtx.client!!, + monitorCtx.settings!!, + workflowMetadata.copy(latestRunTime = workflowExecutionStartTime, latestExecutionId = executionId) + ) } + + return workflowResult.copy(workflowRunResult = resultList, executionEndTime = Instant.now(), error = lastErrorDelegateRun) } private fun validateMonitorSize( @@ -138,7 +153,9 @@ object CompositeWorkflowRunner : WorkflowRunner() { ) { if (delegates.size != monitors.size) { val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() - throw IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") + throw AlertingException.wrap( + IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") + ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 3b7fde74c..5f368ba17 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -14,6 +14,9 @@ import org.opensearch.commons.alerting.model.DocLevelMonitorInput import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.commons.alerting.model.Monitor import org.opensearch.rest.RestRequest +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit import java.util.Collections class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { @@ -353,7 +356,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } - fun `test delete 
workflow underlying monitor deleted`() { + fun `test delete workflow delegate monitor deleted`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) @@ -401,7 +404,74 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } } - fun `test delete workflow underlying monitor not deleted`() { + fun `test delete executed workflow with metadata deleted`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
+ val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + deleteWorkflow(workflowId, true) + // Verify that the workflow is deleted + try { + getWorkflowById(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow not found.") + ) + } + } + // Verify that the workflow metadata is deleted + try { + searchWorkflowMetadata(workflowId) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetMonitor Action error ", + it.contains("List is empty") + ) + } + } + } + + fun `test delete workflow delegate monitor not deleted`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3")) ) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index 18ad37928..8fd2f94ad 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -74,7 +74,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { monitorIds = listOf(monitorResponse.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! - val workflowById = searchWorkflow(workflowResponse.id)!! 
+ val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) @@ -125,6 +125,69 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) } + fun `test execute workflow verify workflow metadata`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! + val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
+ val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + val workflowMetadata = searchWorkflowMetadata(id = workflowId) + assertNotNull("Workflow metadata not initialized", workflowMetadata) + assertEquals( + "Workflow metadata execution id not correct", + executeWorkflowResponse.workflowRunResult.executionId, + workflowMetadata!!.latestExecutionId + ) + + val executeWorkflowResponse1 = executeWorkflow(workflowById, workflowId, false)!! + val monitorsRunResults1 = executeWorkflowResponse1.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults1.size) + + val workflowMetadata1 = searchWorkflowMetadata(id = workflowId) + assertNotNull("Workflow metadata not initialized", workflowMetadata) + assertEquals( + "Workflow metadata execution id not correct", + executeWorkflowResponse1.workflowRunResult.executionId, + workflowMetadata1!!.latestExecutionId + ) + } + fun `test execute workflow with custom alerts and finding index with bucket level doc level delegates when bucket level delegate is used in chained finding`() { val query = QueryBuilders.rangeQuery("test_strict_date_time") .gt("{{period_end}}||-10d") @@ -192,7 +255,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! - val workflowById = searchWorkflow(workflowResponse.id)!! + val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) // Creates 5 documents @@ -329,7 +392,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { monitorIds = listOf(docLevelMonitorResponse.id, bucketLevelMonitorResponse.id, docLevelMonitorResponse1.id, queryMonitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! - val workflowById = searchWorkflow(workflowResponse.id)!! 
+ val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) // Creates 5 documents @@ -423,12 +486,12 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! - val workflowById = searchWorkflow(workflowResponse.id)!! + val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) deleteIndex(index) - val response = executeWorkflow(workflowById, workflowById.id, false)!! + val response = executeWorkflow(workflowById, workflowById!!.id, false)!! assertNotNull(response.workflowRunResult.error) assertTrue(response.workflowRunResult.error is AlertingException) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt index 23505d69b..21af929cb 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/WorkflowSingleNodeTestCase.kt @@ -10,6 +10,7 @@ import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.action.ExecuteWorkflowAction import org.opensearch.alerting.action.ExecuteWorkflowRequest import org.opensearch.alerting.action.ExecuteWorkflowResponse +import org.opensearch.alerting.model.WorkflowMetadata import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.json.JsonXContent @@ -65,6 +66,35 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { }.first() } + protected fun searchWorkflowMetadata( + id: String, + indices: String = ScheduledJob.SCHEDULED_JOBS_INDEX, + refresh: Boolean = true, + ): WorkflowMetadata? 
{ + try { + if (refresh) refreshIndex(indices) + } catch (e: Exception) { + logger.warn("Could not refresh index $indices because: ${e.message}") + return null + } + val ssb = SearchSourceBuilder() + ssb.version(true) + ssb.query(TermQueryBuilder("workflow_metadata.workflow_id", id)) + val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() + + return searchResponse.hits.hits.map { it -> + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + lateinit var workflowMetadata: WorkflowMetadata + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow_metadata" -> workflowMetadata = WorkflowMetadata.parse(xcp) + } + } + workflowMetadata.copy(id = it.id) + }.first() + } + protected fun upsertWorkflow( workflow: Workflow, id: String = Workflow.NO_ID, @@ -93,10 +123,10 @@ abstract class WorkflowSingleNodeTestCase : AlertingSingleNodeTestCase() { ).get() } - protected fun deleteWorkflow(workflowId: String, deleteUnderlyingMonitors: Boolean? = null) { + protected fun deleteWorkflow(workflowId: String, deleteDelegateMonitors: Boolean? 
= null) { client().execute( AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, - DeleteWorkflowRequest(workflowId, deleteUnderlyingMonitors, WriteRequest.RefreshPolicy.IMMEDIATE) + DeleteWorkflowRequest(workflowId, deleteDelegateMonitors, WriteRequest.RefreshPolicy.IMMEDIATE) ).get() } diff --git a/core/src/main/resources/mappings/scheduled-jobs.json b/core/src/main/resources/mappings/scheduled-jobs.json index 2babe37f4..630f99cfd 100644 --- a/core/src/main/resources/mappings/scheduled-jobs.json +++ b/core/src/main/resources/mappings/scheduled-jobs.json @@ -841,22 +841,6 @@ }, "latest_execution_id": { "type": "keyword" - }, - "last_action_execution_times": { - "type": "nested", - "properties": { - "action_id": { - "type": "keyword" - }, - "execution_time": { - "type": "date", - "format": "strict_date_time||epoch_millis" - } - } - }, - "last_run_context": { - "type": "object", - "enabled": false } } } From af86c6909834a329d4ea63fb61d4155e579a9474 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Mar 2023 11:46:36 +0100 Subject: [PATCH 15/18] Renamed properties. 
Added workflow metadata dryrun integration test that verify that workflow metadata is not created Signed-off-by: Stevan Buzejic --- .../alerting/BucketLevelMonitorRunner.kt | 2 +- .../alerting/DocumentLevelMonitorRunner.kt | 2 +- .../org/opensearch/alerting/InputService.kt | 2 +- .../alerting/workflow/WorkflowRunContext.kt | 2 +- .../opensearch/alerting/WorkflowRunnerIT.kt | 57 ++++++++++++++++++- 5 files changed, 59 insertions(+), 6 deletions(-) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 4c8bdb20a..45eebc332 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -397,7 +397,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { sr.source().query(queryBuilder) } val searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(sr, it) } - return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding, workflowRunContext?.workflowExecutionId) + return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding, workflowRunContext?.executionId) } else { logger.error("Couldn't resolve groupBy field. 
Not generating bucket level monitor findings for monitor %${monitor.id}") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 4a5b768b6..787a43ddb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -214,7 +214,7 @@ object DocumentLevelMonitorRunner : MonitorRunner() { docsToQueries, queryToDocIds, dryrun, - workflowRunContext?.workflowExecutionId + workflowRunContext?.executionId ) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index bb631a74d..6401ca6b8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -79,7 +79,7 @@ class InputService( val rewrittenQuery = AggregationQueryRewriter.rewriteQuery(deepCopyQuery(input.query), prevResult, monitor.triggers) // Rewrite query to consider the doc ids per given index - if (chainedFindingExist(matchingDocIdsPerIndex)) { + if (chainedFindingExist(matchingDocIdsPerIndex) && rewrittenQuery.query() != null) { val updatedSourceQuery = updateInputQueryWithFindingDocIds(rewrittenQuery.query(), matchingDocIdsPerIndex!!) 
rewrittenQuery.query(updatedSourceQuery) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt index 77fd07179..27cbb6e11 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunContext.kt @@ -8,6 +8,6 @@ package org.opensearch.alerting.workflow data class WorkflowRunContext( val workflowId: String, val chainedMonitorId: String?, - val workflowExecutionId: String, + val executionId: String, val matchingDocIdsPerIndex: Map> ) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index 8fd2f94ad..2f7efc82a 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -30,6 +30,7 @@ import java.lang.Exception import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit +import java.util.NoSuchElementException import java.util.concurrent.ExecutionException class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { @@ -161,7 +162,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { "test_field_1" : "us-west-2" }""" indexDoc(index, "1", testDoc1) - + // First execution val workflowId = workflowResponse.id val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! val monitorsRunResults = executeWorkflowResponse.workflowRunResult.workflowRunResult @@ -174,7 +175,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { executeWorkflowResponse.workflowRunResult.executionId, workflowMetadata!!.latestExecutionId ) - + // Second execution val executeWorkflowResponse1 = executeWorkflow(workflowById, workflowId, false)!! 
val monitorsRunResults1 = executeWorkflowResponse1.workflowRunResult.workflowRunResult assertEquals(2, monitorsRunResults1.size) @@ -188,6 +189,58 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { ) } + fun `test execute workflow dryrun verify workflow metadata not created`() { + val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3") + val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) + val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1) + ) + val monitorResponse = createMonitor(monitor1)!! + + val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4") + val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) + val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + var monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) + + val monitorResponse2 = createMonitor(monitor2)!! + + var workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ val workflowById = searchWorkflow(workflowResponse.id) + assertNotNull(workflowById) + + var testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) + // Matches monitor1 + val testDoc1 = """{ + "message" : "This is an error from IAD region", + "source.ip.v6.v2" : 16644, + "test_strict_date_time" : "$testTime", + "test_field_1" : "us-west-2" + }""" + indexDoc(index, "1", testDoc1) + // First execution + val workflowId = workflowResponse.id + val executeWorkflowResponse = executeWorkflow(workflowById, workflowId, true) + assertNotNull("Workflow run result is null", executeWorkflowResponse) + val monitorsRunResults = executeWorkflowResponse!!.workflowRunResult.workflowRunResult + assertEquals(2, monitorsRunResults.size) + + var exception: Exception? = null + try { + searchWorkflowMetadata(id = workflowId) + } catch (ex: Exception) { + exception = ex + } + assertTrue(exception is NoSuchElementException) + } + fun `test execute workflow with custom alerts and finding index with bucket level doc level delegates when bucket level delegate is used in chained finding`() { val query = QueryBuilders.rangeQuery("test_strict_date_time") .gt("{{period_end}}||-10d") From 4dd13ed413c9ca60deb42c7d17a8e3ac5cc3a478 Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Mar 2023 15:59:44 +0100 Subject: [PATCH 16/18] Added workflow integration test for verifying changing the order of the monitors once the workflow is updated Signed-off-by: Stevan Buzejic --- .../opensearch/alerting/WorkflowMonitorIT.kt | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 5f368ba17..a8be7f5b6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -197,6 +197,92 @@ class 
WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) } + fun `test update workflow change order of delegate monitors`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitor2 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + val monitorResponse2 = createMonitor(monitor2)!! + + val workflow = randomWorkflowMonitor( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) + ) + + val workflowResponse = upsertWorkflow(workflow)!! + assertNotNull("Workflow creation failed", workflowResponse) + assertNotNull(workflowResponse.workflow) + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowResponse.id) + assertTrue("incorrect version", workflowResponse.version > 0) + + var workflowById = searchWorkflow(workflowResponse.id)!! + assertNotNull(workflowById) + + val updatedWorkflowResponse = upsertWorkflow( + randomWorkflowMonitor( + monitorIds = listOf(monitorResponse2.id, monitorResponse1.id) + ), + workflowResponse.id, + RestRequest.Method.PUT + )!! 
+ + assertNotNull("Workflow creation failed", updatedWorkflowResponse) + assertNotNull(updatedWorkflowResponse.workflow) + assertEquals("Workflow id changed", workflowResponse.id, updatedWorkflowResponse.id) + assertTrue("incorrect version", updatedWorkflowResponse.version > 0) + + workflowById = searchWorkflow(updatedWorkflowResponse.id)!! + + // Verify workflow + assertNotEquals("response is missing Id", Monitor.NO_ID, workflowById.id) + assertTrue("incorrect version", workflowById.version > 0) + assertEquals("Workflow name not correct", updatedWorkflowResponse.workflow.name, workflowById.name) + assertEquals("Workflow owner not correct", updatedWorkflowResponse.workflow.owner, workflowById.owner) + assertEquals("Workflow input not correct", updatedWorkflowResponse.workflow.inputs, workflowById.inputs) + + // Delegate verification + @Suppress("UNCHECKED_CAST") + val delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } + assertEquals("Delegates size not correct", 2, delegates.size) + + val delegate1 = delegates[0] + assertNotNull(delegate1) + assertEquals("Delegate1 order not correct", 1, delegate1.order) + assertEquals("Delegate1 id not correct", monitorResponse2.id, delegate1.monitorId) + + val delegate2 = delegates[1] + assertNotNull(delegate2) + assertEquals("Delegate2 order not correct", 2, delegate2.order) + assertEquals("Delegate2 id not correct", monitorResponse1.id, delegate2.monitorId) + assertEquals( + "Delegate2 Chained finding not correct", monitorResponse2.id, delegate2.chainedFindings!!.monitorId + ) + } + fun `test update workflow remove monitor success`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") val docLevelInput = DocLevelMonitorInput( From a14dfead85b2ce0cdce1c89412ca2f15d11eb79c Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Mar 2023 16:10:59 +0100 Subject: [PATCH 17/18] Renamed methods for generating the workflows Signed-off-by: Stevan Buzejic --- 
.../org/opensearch/alerting/TestHelpers.kt | 4 +- .../opensearch/alerting/WorkflowMonitorIT.kt | 66 +++++++++---------- .../opensearch/alerting/WorkflowRunnerIT.kt | 14 ++-- 3 files changed, 42 insertions(+), 42 deletions(-) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index d06f6fa91..0723fd554 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -223,7 +223,7 @@ fun randomDocumentLevelMonitor( ) } -fun randomWorkflowMonitor( +fun randomWorkflow( id: String = Workflow.NO_ID, monitorIds: List, name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -255,7 +255,7 @@ fun randomWorkflowMonitor( ) } -fun randomWorkflowMonitorWithDelegates( +fun randomWorkflowWithDelegates( id: String = Workflow.NO_ID, delegates: List, name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index a8be7f5b6..93b94ac9e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -53,7 +53,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) ) @@ -124,7 +124,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) ) @@ -149,7 +149,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse3 = createMonitor(monitor3)!! val updatedWorkflowResponse = upsertWorkflow( - randomWorkflowMonitor( + randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id, monitorResponse3.id) ), workflowResponse.id, @@ -229,7 +229,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) ) @@ -243,7 +243,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(workflowById) val updatedWorkflowResponse = upsertWorkflow( - randomWorkflowMonitor( + randomWorkflow( monitorIds = listOf(monitorResponse2.id, monitorResponse1.id) ), workflowResponse.id, @@ -315,7 +315,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) ) @@ -329,7 +329,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(workflowById) val updatedWorkflowResponse = upsertWorkflow( - randomWorkflowMonitor( + randomWorkflow( monitorIds = listOf(monitorResponse1.id) ), workflowResponse.id, @@ -373,7 +373,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflowMonitor( + val workflowRequest = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) @@ -455,7 +455,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse = createMonitor(monitor)!! 
- val workflowRequest = randomWorkflowMonitor( + val workflowRequest = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflowRequest)!! @@ -510,7 +510,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -570,7 +570,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { var monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflowMonitor( + val workflowRequest = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflowRequest)!! @@ -580,7 +580,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertNotNull(getWorkflowResponse) assertEquals(workflowId, getWorkflowResponse.id) - val workflowRequest2 = randomWorkflowMonitor( + val workflowRequest2 = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse2 = upsertWorkflow(workflowRequest2)!! @@ -619,7 +619,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflowMonitor( + val workflowRequest = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) @@ -682,7 +682,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } fun `test create workflow without delegate failure`() { - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = Collections.emptyList() ) try { @@ -715,14 +715,14 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflowMonitor( + workflow = randomWorkflow( id = workflowResponse.id, monitorIds = Collections.emptyList() ) @@ -739,7 +739,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { } fun `test create workflow duplicate delegate failure`() { - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf("1", "1", "2") ) try { @@ -766,14 +766,14 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflowMonitor( + workflow = randomWorkflow( id = workflowResponse.id, monitorIds = listOf("1", "1", "2") ) @@ -801,7 +801,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - val workflow = randomWorkflowMonitor( + val workflow = randomWorkflow( monitorIds = listOf("-1", monitorResponse.id) ) try { @@ -828,13 +828,13 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! 
assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflowMonitor( + workflow = randomWorkflow( id = workflowResponse.id, monitorIds = listOf("-1", monitorResponse.id) ) @@ -857,7 +857,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(1, "monitor-2"), Delegate(2, "monitor-3") ) - val workflow = randomWorkflowMonitorWithDelegates( + val workflow = randomWorkflowWithDelegates( delegates = delegates ) try { @@ -884,7 +884,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -895,7 +895,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(1, "monitor-2"), Delegate(2, "monitor-3") ) - workflow = randomWorkflowMonitorWithDelegates( + workflow = randomWorkflowWithDelegates( id = workflowResponse.id, delegates = delegates ) @@ -917,7 +917,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(2, "monitor-2", ChainedFindings("monitor-1")), Delegate(3, "monitor-3", ChainedFindings("monitor-x")) ) - val workflow = randomWorkflowMonitorWithDelegates( + val workflow = randomWorkflowWithDelegates( delegates = delegates ) @@ -948,7 +948,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val queryMonitor = randomQueryLevelMonitor() val queryMonitorResponse = createMonitor(queryMonitor)!! 
- var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id) ) try { @@ -967,7 +967,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { val delegates = listOf( Delegate(1, "monitor-1") ) - val workflow = randomWorkflowMonitorWithDelegates( + val workflow = randomWorkflowWithDelegates( delegates = delegates ) @@ -995,7 +995,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -1006,7 +1006,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(2, "monitor-2", ChainedFindings("monitor-1")), Delegate(3, "monitor-3", ChainedFindings("monitor-x")) ) - workflow = randomWorkflowMonitorWithDelegates( + workflow = randomWorkflowWithDelegates( id = workflowResponse.id, delegates = delegates ) @@ -1029,7 +1029,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(3, "monitor-2", ChainedFindings("monitor-1")), Delegate(2, "monitor-3", ChainedFindings("monitor-2")) ) - val workflow = randomWorkflowMonitorWithDelegates( + val workflow = randomWorkflowWithDelegates( delegates = delegates ) @@ -1057,7 +1057,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! 
@@ -1068,7 +1068,7 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { Delegate(3, "monitor-2", ChainedFindings("monitor-1")), Delegate(2, "monitor-3", ChainedFindings("monitor-2")) ) - workflow = randomWorkflowMonitorWithDelegates( + workflow = randomWorkflowWithDelegates( delegates = delegates ) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt index 2f7efc82a..466e62d35 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowRunnerIT.kt @@ -71,7 +71,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -146,7 +146,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -209,7 +209,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id, monitorResponse2.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -304,7 +304,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! // 1. bucketMonitor (chainedFinding = null) 2. 
docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -441,7 +441,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val queryMonitorResponse = createMonitor(randomQueryLevelMonitor(inputs = listOf(queryMonitorInput), triggers = listOf(queryLevelTrigger)))!! // 1. docMonitor (chainedFinding = null) 2. bucketMonitor (chainedFinding = docMonitor) 3. docMonitor (chainedFinding = bucketMonitor) 4. queryMonitor (chainedFinding = docMonitor 3) - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(docLevelMonitorResponse.id, bucketLevelMonitorResponse.id, docLevelMonitorResponse1.id, queryMonitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -535,7 +535,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflowMonitor( + var workflow = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflow)!! @@ -565,7 +565,7 @@ class WorkflowRunnerIT : WorkflowSingleNodeTestCase() { val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflowMonitor( + val workflowRequest = randomWorkflow( monitorIds = listOf(monitorResponse.id) ) val workflowResponse = upsertWorkflow(workflowRequest)!! 
From 486c5ab2b1e860c0a6c62c3a668f6175ba92c76d Mon Sep 17 00:00:00 2001 From: Stevan Buzejic Date: Mon, 13 Mar 2023 16:59:46 +0100 Subject: [PATCH 18/18] Added test when updating the non-existing workflow Signed-off-by: Stevan Buzejic --- .../opensearch/alerting/WorkflowMonitorIT.kt | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt index 93b94ac9e..1a5d94e22 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/WorkflowMonitorIT.kt @@ -361,6 +361,45 @@ class WorkflowMonitorIT : WorkflowSingleNodeTestCase() { assertEquals("Delegate1 id not correct", monitorResponse1.id, delegate1.monitorId) } + fun `test update workflow doesn't exist failure`() { + val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3") + val docLevelInput = DocLevelMonitorInput( + "description", listOf(index), listOf(docQuery1) + ) + val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) + val customFindingsIndex = "custom_findings_index" + val customFindingsIndexPattern = "custom_findings_index-1" + val customQueryIndex = "custom_alerts_index" + val monitor1 = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern + ) + ) + + val monitorResponse1 = createMonitor(monitor1)!! + + val workflow = randomWorkflow( + monitorIds = listOf(monitorResponse1.id) + ) + val workflowResponse = upsertWorkflow(workflow)!! 
+ assertNotNull("Workflow creation failed", workflowResponse) + + try { + upsertWorkflow(workflow, "testId", RestRequest.Method.PUT) + } catch (e: Exception) { + e.message?.let { + assertTrue( + "Exception not returning GetWorkflow Action error ", + it.contains("Workflow with testId is not found") + ) + } + } + } + fun `test get workflow`() { val docLevelInput = DocLevelMonitorInput( "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3"))