diff --git a/.ci/validation/package-lock.json b/.ci/validation/package-lock.json index a2a7346c..ad60031d 100644 --- a/.ci/validation/package-lock.json +++ b/.ci/validation/package-lock.json @@ -11,7 +11,8 @@ "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^2.1.1", - "js-yaml": "^4.1.0" + "js-yaml": "^4.1.0", + "marked": "^13.0.0" }, "devDependencies": { "@types/jest": "^29.5.12", @@ -3237,6 +3238,17 @@ "tmpl": "1.0.5" } }, + "node_modules/marked": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-13.0.0.tgz", + "integrity": "sha512-VTeDCd9txf4KLLljUZ0nljE/Incb9SrWuueE44QVuU0pkOdh4sfCeW1Z6lPcxyDRSVY6rm8db/0OPaN75RNUmw==", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", diff --git a/.ci/validation/package.json b/.ci/validation/package.json index d6371b25..c1f07f2b 100644 --- a/.ci/validation/package.json +++ b/.ci/validation/package.json @@ -25,6 +25,7 @@ "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^2.1.1", - "js-yaml": "^4.1.0" + "js-yaml": "^4.1.0", + "marked": "^13.0.0" } } diff --git a/.ci/validation/src/dsl.test.ts b/.ci/validation/src/dsl.test.ts new file mode 100644 index 00000000..dd01702f --- /dev/null +++ b/.ci/validation/src/dsl.test.ts @@ -0,0 +1,53 @@ +/* + * Copyright 2023-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { SWSchemaValidator } from "./index"; +import fs from "node:fs"; +import path from "node:path"; +import marked from "marked"; + +SWSchemaValidator.prepareSchemas(); + +const dslReferencePath = path.join( + __dirname, + "..", + "..", + "..", + "dsl-reference.md" +); + +describe(`Verify every example in the dsl docs`, () => { + const workflows = marked + .lexer(fs.readFileSync(dslReferencePath, SWSchemaValidator.defaultEncoding)) + .filter((item): item is marked.Tokens.Code => item.type === "code") + .filter((item) => item.lang === "yaml") + .map((item) => item.text) + .map((text) => SWSchemaValidator.yamlToJSON(text)) + .filter((workflow) => typeof workflow === "object") + .filter((workflow) => "document" in workflow) + .filter((workflow) => "dsl" in workflow.document); + + test.each(workflows)("$document.name", (workflow) => { + const results = SWSchemaValidator.validateSchema(workflow); + if (results?.errors) { + console.warn( + `Schema validation on workflow ${workflow.document.name} failed with: `, + JSON.stringify(results.errors, null, 2) + ); + } + expect(results?.valid).toBeTruthy(); + }); +}); diff --git a/.ci/validation/src/index.test.ts b/.ci/validation/src/examples.test.ts similarity index 51% rename from .ci/validation/src/index.test.ts rename to .ci/validation/src/examples.test.ts index eda2381a..0133c22f 100644 --- a/.ci/validation/src/index.test.ts +++ b/.ci/validation/src/examples.test.ts @@ -15,33 +15,35 @@ */ import { SWSchemaValidator } from "./index"; -import fs from "fs"; -import { join } from "path"; +import fs from "node:fs"; +import path from "node:path"; SWSchemaValidator.prepareSchemas(); const examplePath = "../../../examples"; describe(`Verify every example in the repository`, () => { - fs.readdirSync(join(__dirname, examplePath), { - encoding: SWSchemaValidator.defaultEncoding, - recursive: false, - withFileTypes: true, - }).forEach((file) => { - if (file.isFile() && file.name.endsWith(".yaml")) { - test(`Example ${file.name}`, () => { - const workflow = SWSchemaValidator.toJSON( - join(__dirname, `${examplePath}/${file.name}`) - ); - const results = SWSchemaValidator.validateSchema(workflow); - if (results?.errors != null) { - console.warn( - `Schema validation on ${file.name} failed with: `, - JSON.stringify(results.errors, null, 2) - ); - } - expect(results?.valid).toBeTruthy(); - }); + const examples = fs + .readdirSync(path.join(__dirname, examplePath), { + encoding: SWSchemaValidator.defaultEncoding, + recursive: false, + withFileTypes: true, + }) + .filter((file) => file.isFile()) + .filter((file) => file.name.endsWith(".yaml")) + .map((file) => file.name); + + test.each(examples)("Example %s", (file) => { + const workflow = SWSchemaValidator.loadAsJSON( + path.join(__dirname, `${examplePath}/${file}`) + ); + const results = SWSchemaValidator.validateSchema(workflow); + if (results?.errors) { + console.warn( + `Schema validation on ${file} failed with: `, + JSON.stringify(results.errors, null, 2) + ); } + expect(results?.valid).toBeTruthy(); }); }); diff --git a/.ci/validation/src/index.ts b/.ci/validation/src/index.ts index f7a16112..7e2b02a8 100644 --- a/.ci/validation/src/index.ts +++ b/.ci/validation/src/index.ts @@ -14,10 +14,10 @@ * limitations under the License. 
*/ -import fs from "fs"; -import Ajv from "ajv"; +import fs from "node:fs"; +import Ajv from "ajv/dist/2020"; import addFormats from "ajv-formats"; -import { join } from "path"; +import path from "node:path"; import yaml = require("js-yaml"); export module SWSchemaValidator { @@ -25,43 +25,51 @@ export module SWSchemaValidator { addFormats(ajv); const workflowSchemaId = - "https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.json"; + "https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.yaml"; const schemaPath = "../../../schema"; export const defaultEncoding = "utf-8"; export function prepareSchemas() { - fs.readdirSync(join(__dirname, schemaPath), { + const files = fs.readdirSync(path.join(__dirname, schemaPath), { encoding: defaultEncoding, recursive: false, withFileTypes: true, - }).forEach((file) => { - if (file.isFile()) { - ajv.addSchema(syncReadSchema(file.name)); - } }); + + files + .filter((file) => file.isFile()) + .forEach((file) => { + ajv.addSchema(syncReadSchema(file.name)); + }); } - function syncReadSchema(filename: string) { - return toJSON(join(__dirname, `${schemaPath}/${filename}`)); + function syncReadSchema(filename: string): any { + return loadAsJSON(path.join(__dirname, `${schemaPath}/${filename}`)); } - export function toJSON(filename: string) { - const yamlObj = yaml.load(fs.readFileSync(filename, defaultEncoding), { + export function loadAsJSON(filename: string): any { + return yamlToJSON(fs.readFileSync(filename, defaultEncoding)); + } + + export function yamlToJSON(yamlStr: string): any { + const yamlObj = yaml.load(yamlStr, { json: true, }); - return JSON.parse(JSON.stringify(yamlObj, null, 2)); + return structuredClone(yamlObj); } - export function validateSchema(workflow: JSON) { + export function validateSchema(workflow: Record) { const validate = ajv.getSchema(workflowSchemaId); - if (validate != undefined) { - const isValid = validate(workflow); - return { - valid: isValid, - errors: validate.errors, - }; + + if (!validate) { + throw new Error(`Failed to validate schema on workflow`); } - throw new Error(`Failed to validate schema on workflow`); + + const isValid = validate(workflow); + return { + valid: isValid, + errors: validate.errors, + }; } } diff --git a/.ci/validation/src/invalid.test.ts b/.ci/validation/src/invalid.test.ts new file mode 100644 index 00000000..73929454 --- /dev/null +++ b/.ci/validation/src/invalid.test.ts @@ -0,0 +1,43 @@ +/* + * Copyright 2023-Present The Serverless Workflow Specification Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { SWSchemaValidator } from "./index"; +import fs from "node:fs"; +import path from "node:path"; + +SWSchemaValidator.prepareSchemas(); + +const invalidPath = "../test/fixtures/invalid"; + +describe(`Check that invalid workflows are rejected`, () => { + const examples = fs + .readdirSync(path.join(__dirname, invalidPath), { + encoding: SWSchemaValidator.defaultEncoding, + recursive: false, + withFileTypes: true, + }) + .filter((file) => file.isFile()) + .filter((file) => file.name.endsWith(".yaml")) + .map((file) => file.name); + + test.each(examples)("Example %s", (file) => { + const workflow = SWSchemaValidator.loadAsJSON( + path.join(__dirname, `${invalidPath}/${file}`) + ); + const results = SWSchemaValidator.validateSchema(workflow); + expect(results?.valid).toBeFalsy(); + }); +}); diff --git a/.ci/validation/test/fixtures/invalid/extra-property-in-call.yaml b/.ci/validation/test/fixtures/invalid/extra-property-in-call.yaml new file mode 100644 index 00000000..a400e257 --- /dev/null +++ b/.ci/validation/test/fixtures/invalid/extra-property-in-call.yaml @@ -0,0 +1,12 @@ +document: + dsl: 1.0.0-alpha1 + namespace: examples + name: two-tasks-in-one-item + version: 1.0.0-alpha1 +do: + - getPet: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} + foo: bar diff --git a/.ci/validation/test/fixtures/invalid/two-tasks-in-one-item.yaml b/.ci/validation/test/fixtures/invalid/two-tasks-in-one-item.yaml new file mode 100644 index 00000000..aed92cbf --- /dev/null +++ b/.ci/validation/test/fixtures/invalid/two-tasks-in-one-item.yaml @@ -0,0 +1,14 @@ +document: + dsl: 1.0.0-alpha1 + namespace: examples + name: two-tasks-in-one-item + version: 1.0.0-alpha1 +do: + - getPet: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} + setMessage: + set: + message: "Looking for {petId}" diff --git a/ctk/features/branch.feature b/ctk/features/branch.feature new file mode 100644 index 00000000..af81bc8c --- /dev/null +++ b/ctk/features/branch.feature @@ -0,0 +1,31 @@ +Feature: Composite Task + As an implementer of the workflow DSL + I want to ensure that composite tasks can be executed within the workflow + So that my implementation conforms to the expected behavior + + # Tests composite tasks With competing concurrent sub tasks + Scenario: Fork Task With Competing Concurrent Sub Tasks + Given a workflow with definition: + """yaml + document: + dsl: 1.0.0-alpha1 + namespace: default + name: fork + do: + - branchWithCompete: + fork: + compete: true + branches: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } + """ + When the workflow is executed + Then the workflow should complete + And the workflow output should have a 'colors' property containing 1 items \ No newline at end of file diff --git a/ctk/features/call.feature b/ctk/features/call.feature index 0db32456..cf3c591c 100644 --- a/ctk/features/call.feature +++ b/ctk/features/call.feature @@ -15,13 +15,14 @@ Feature: Call Task namespace: default name: http-call-with-content-output do: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/findByStatus?status={status} - output: - from: .[0] + - findPet: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/findByStatus?status={status} + output: + from: .[0] """ And given the workflow input is: """yaml @@ -42,12 +43,13 @@ Feature: Call Task 
namespace: default name: http-call-with-response-output do: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/{petId} - output: response + - getPet: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/{petId} + output: response """ And given the workflow input is: """yaml @@ -68,15 +70,16 @@ Feature: Call Task namespace: default name: http-call-with-basic-auth do: - call: http - with: - method: get - endpoint: - uri: https://httpbin.org/basic-auth/{username}/{password} - authentication: - basic: - username: ${ .username } - password: ${ .password } + - login: + call: http + with: + method: get + endpoint: + uri: https://httpbin.org/basic-auth/{username}/{password} + authentication: + basic: + username: ${ .username } + password: ${ .password } """ And given the workflow input is: """yaml @@ -96,15 +99,16 @@ Feature: Call Task namespace: default name: openapi-call-with-content-output do: - call: openapi - with: - document: - uri: https://petstore.swagger.io/v2/swagger.json - operation: findPetsByStatus - parameters: - status: ${ .status } - output: - from: . | length + - findPet: + call: openapi + with: + document: + uri: "https://petstore.swagger.io/v2/swagger.json" + operation: findPetsByStatus + parameters: + status: ${ .status } + output: + from: . | length """ And given the workflow input is: """yaml @@ -123,14 +127,15 @@ Feature: Call Task namespace: default name: openapi-call-with-response-output do: - call: openapi - with: - document: - uri: https://petstore.swagger.io/v2/swagger.json - operation: getPetById - parameters: - petId: ${ .petId } - output: response + - getPet: + call: openapi + with: + document: + uri: "https://petstore.swagger.io/v2/swagger.json" + operation: getPetById + parameters: + petId: ${ .petId } + output: response """ And given the workflow input is: """yaml diff --git a/ctk/features/composite.feature b/ctk/features/composite.feature deleted file mode 100644 index 0cc3b799..00000000 --- a/ctk/features/composite.feature +++ /dev/null @@ -1,57 +0,0 @@ -Feature: Composite Task - As an implementer of the workflow DSL - I want to ensure that composite tasks can be executed within the workflow - So that my implementation conforms to the expected behavior - - # Tests composite tasks with sequential sub tasks - Scenario: Composite Task With Sequential Sub Tasks - Given a workflow with definition: - """yaml - document: - dsl: 1.0.0-alpha1 - namespace: default - name: composite-sequential - do: - execute: - sequentially: - - setRed: - set: - colors: ${ .colors + ["red"] } - - setGreen: - set: - colors: ${ .colors + ["green"] } - - setBlue: - set: - colors: ${ .colors + ["blue"] } - """ - When the workflow is executed - Then the workflow should complete with output: - """yaml - colors: [ red, green, blue ] - """ - - # Tests composite tasks With competing concurrent sub tasks - Scenario: Composite Task With Competing Concurrent Sub Tasks - Given a workflow with definition: - """yaml - document: - dsl: 1.0.0-alpha1 - namespace: default - name: composite-sequential - do: - execute: - compete: true - concurrently: - - setRed: - set: - colors: ${ .colors + ["red"] } - - setGreen: - set: - colors: ${ .colors + ["green"] } - - setBlue: - set: - colors: ${ .colors + ["blue"] } - """ - When the workflow is executed - Then the workflow should complete - And the workflow output should have a 'colors' property containing 1 items \ No newline at end of file diff --git a/ctk/features/data-flow.feature 
b/ctk/features/data-flow.feature index 36e6af10..fefc4dc0 100644 --- a/ctk/features/data-flow.feature +++ b/ctk/features/data-flow.feature @@ -12,10 +12,11 @@ Feature: Data Flow namespace: default name: output-filtering do: - input: - from: .user.claims.subject #filters the input of the task, using only the user's subject - set: - playerId: ${ . } + - setPlayerId: + input: + from: .user.claims.subject #filters the input of the task, using only the user's subject + set: + playerId: ${ . } """ And given the workflow input is: """yaml @@ -38,13 +39,14 @@ Feature: Data Flow namespace: default name: output-filtering do: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/{petId} #simple interpolation, only possible with top level variables - output: - as: .id #filters the output of the http call, using only the id of the returned object + - getPet: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/{petId} #simple interpolation, only possible with top level variables + output: + as: .id #filters the output of the http call, using only the id of the returned object """ And given the workflow input is: """yaml @@ -65,24 +67,22 @@ Feature: Data Flow namespace: default name: non-object-output do: - execute: - sequentially: - - getPetById1: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/{petId} #simple interpolation, only possible with top level variables - output: - as: .id - - getPetById2: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/2 - output: - as: '{ ids: [ $input, .id ] }' + - getPetById1: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/{petId} #simple interpolation, only possible with top level variables + output: + as: .id + - getPetById2: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/2 + output: + as: '{ ids: [ $input, .id ] }' """ When the workflow is executed Then the workflow should complete with output: diff --git a/ctk/features/do.feature b/ctk/features/do.feature new file mode 100644 index 00000000..e84aa7d3 --- /dev/null +++ b/ctk/features/do.feature @@ -0,0 +1,31 @@ +Feature: Composite Task + As an implementer of the workflow DSL + I want to ensure that composite tasks can be executed within the workflow + So that my implementation conforms to the expected behavior + + # Tests composite tasks with sequential sub tasks + Scenario: Task With Sequential Sub Tasks + Given a workflow with definition: + """yaml + document: + dsl: 1.0.0-alpha1 + namespace: default + name: do + do: + - compositeExample: + do: + - setRed: + set: + colors: ${ .colors + ["red"] } + - setGreen: + set: + colors: ${ .colors + ["green"] } + - setBlue: + set: + colors: ${ .colors + ["blue"] } + """ + When the workflow is executed + Then the workflow should complete with output: + """yaml + colors: [ red, green, blue ] + """ \ No newline at end of file diff --git a/ctk/features/emit.feature b/ctk/features/emit.feature index 4531be46..871ec7f2 100644 --- a/ctk/features/emit.feature +++ b/ctk/features/emit.feature @@ -12,13 +12,14 @@ Feature: Emit Task namespace: default name: emit do: - emit: - event: - with: - source: https://fake-source.com - type: com.fake-source.user.greeted.v1 - data: - greetings: ${ "Hello \(.user.firstName) \(.user.lastName)!" 
} + - emitEvent: + emit: + event: + with: + source: https://fake-source.com + type: com.fake-source.user.greeted.v1 + data: + greetings: ${ "Hello \(.user.firstName) \(.user.lastName)!" } """ And given the workflow input is: """yaml diff --git a/ctk/features/flow.feature b/ctk/features/flow.feature index cad4a786..16917946 100644 --- a/ctk/features/flow.feature +++ b/ctk/features/flow.feature @@ -11,17 +11,15 @@ Feature: Flow Directive namespace: default name: implicit-sequence do: - execute: - sequentially: - - setRed: - set: - colors: '${ .colors + [ "red" ] }' - - setGreen: - set: - colors: '${ .colors + [ "green" ] }' - - setBlue: - set: - colors: '${ .colors + [ "blue" ] }' + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' """ When the workflow is executed Then the workflow should complete with output: @@ -40,20 +38,18 @@ Feature: Flow Directive namespace: default name: explicit-sequence do: - execute: - sequentially: - - setRed: - set: - colors: '${ .colors + [ "red" ] }' - then: setGreen - - setBlue: - set: - colors: '${ .colors + [ "blue" ] }' - then: end - - setGreen: - set: - colors: '${ .colors + [ "green" ] }' - then: setBlue + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: setGreen + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: setBlue """ When the workflow is executed Then the workflow should complete with output: diff --git a/ctk/features/for.feature b/ctk/features/for.feature index 0423622f..3ac783e5 100644 --- a/ctk/features/for.feature +++ b/ctk/features/for.feature @@ -14,12 +14,14 @@ Feature: For Task namespace: default name: for do: - for: - each: color - in: '.colors' - do: - set: - processed: '${ { colors: (.processed.colors + [ $color ]), indexes: (.processed.indexes + [ $index ])} }' + - loopColors: + for: + each: color + in: '.colors' + do: + - markProcessed: + set: + processed: '${ { colors: (.processed.colors + [ $color ]), indexes: (.processed.indexes + [ $index ])} }' """ And given the workflow input is: """yaml diff --git a/ctk/features/raise.feature b/ctk/features/raise.feature index e7df2459..39091dc1 100644 --- a/ctk/features/raise.feature +++ b/ctk/features/raise.feature @@ -11,11 +11,12 @@ Feature: Raise Task namespace: default name: raise-custom-error do: - raise: - error: - status: 400 - type: https://serverlessworkflow.io/errors/types/compliance - title: Compliance Error + - raiseError: + raise: + error: + status: 400 + type: https://serverlessworkflow.io/errors/types/compliance + title: Compliance Error """ When the workflow is executed Then the workflow should fault with error: diff --git a/ctk/features/set.feature b/ctk/features/set.feature index f9cbb64a..c0772f88 100644 --- a/ctk/features/set.feature +++ b/ctk/features/set.feature @@ -12,10 +12,11 @@ Feature: Set Task namespace: default name: set do: - set: - shape: circle - size: ${ .configuration.size } - fill: ${ .configuration.fill } + - setShape: + set: + shape: circle + size: ${ .configuration.size } + fill: ${ .configuration.fill } """ And given the workflow input is: """yaml diff --git a/ctk/features/switch.feature b/ctk/features/switch.feature index f101bef9..bba64c00 100644 --- a/ctk/features/switch.feature +++ b/ctk/features/switch.feature @@ -11,31 +11,29 @@ Feature: Switch Task namespace: default name: switch-match do: - execute: - sequentially; - - 
switchColor: - switch: - - red: - when: '.color == "red"' - then: setRed - - green: - when: '.color == "green"' - then: setGreen - - blue: - when: '.color == "blue"' - then: setBlue - - setRed: - set: - colors: '${ .colors + [ "red" ] }' - then: end - - setGreen: - set: - colors: '${ .colors + [ "green" ] }' - then: end - - setBlue: - set: - colors: '${ .colors + [ "blue" ] }' - then: end + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - blue: + when: '.color == "blue"' + then: setBlue + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + then: end + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + then: end + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' + then: end """ And given the workflow input is: """yaml @@ -57,29 +55,27 @@ Feature: Switch Task namespace: default name: switch-default-implicit do: - execute: - sequentially: - - switchColor: - switch: - - red: - when: '.color == "red"' - then: setRed - - green: - when: '.color == "green"' - then: setGreen - - blue: - when: '.color == "blue"' - then: setBlue - then: end - - setRed: - set: - colors: '${ .colors + [ "red" ] }' - - setGreen: - set: - colors: '${ .colors + [ "green" ] }' - - setBlue: - set: - colors: '${ .colors + [ "blue" ] }' + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - blue: + when: '.color == "blue"' + then: setBlue + then: end + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' """ And given the workflow input is: """yaml @@ -101,33 +97,31 @@ Feature: Switch Task namespace: default name: switch-default-implicit do: - execute: - sequentially: - - switchColor: - switch: - - red: - when: '.color == "red"' - then: setRed - - green: - when: '.color == "green"' - then: setGreen - - blue: - when: '.color == "blue"' - then: setBlue - - anyOtherColor: - then: setCustomColor - - setRed: - set: - colors: '${ .colors + [ "red" ] }' - - setGreen: - set: - colors: '${ .colors + [ "green" ] }' - - setBlue: - set: - colors: '${ .colors + [ "blue" ] }' - - setCustomColor: - set: - colors: '${ .colors + [ $input.color ] }' + - switchColor: + switch: + - red: + when: '.color == "red"' + then: setRed + - green: + when: '.color == "green"' + then: setGreen + - blue: + when: '.color == "blue"' + then: setBlue + - anyOtherColor: + then: setCustomColor + - setRed: + set: + colors: '${ .colors + [ "red" ] }' + - setGreen: + set: + colors: '${ .colors + [ "green" ] }' + - setBlue: + set: + colors: '${ .colors + [ "blue" ] }' + - setCustomColor: + set: + colors: '${ .colors + [ $input.color ] }' """ And given the workflow input is: """yaml diff --git a/ctk/features/try.feature b/ctk/features/try.feature index 893f9ff6..ebcb9449 100644 --- a/ctk/features/try.feature +++ b/ctk/features/try.feature @@ -15,21 +15,24 @@ Feature: Try Task namespace: default name: try-catch-404 do: - try: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/getPetByName/{petName} - catch: - errors: - with: - type: https://serverlessworkflow.io/dsl/errors/types/communication - status: 404 - as: err - do: - set: - error: ${ $err } + - tryGetPet: + try: + - getPet: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/getPetByName/{petName} + catch: + errors: + with: + type: 
https://serverlessworkflow.io/dsl/errors/types/communication + status: 404 + as: err + do: + - setError: + set: + error: ${ $err } """ And given the workflow input is: """yaml @@ -55,21 +58,24 @@ Feature: Try Task namespace: default name: try-catch-503 do: - try: - call: http - with: - method: get - endpoint: - uri: https://petstore.swagger.io/v2/pet/getPetByName/{petName} - catch: - errors: - with: - type: https://serverlessworkflow.io/dsl/errors/types/communication - status: 503 - as: err - do: - set: - error: ${ $err } + - tryGetPet: + try: + - getPet: + call: http + with: + method: get + endpoint: + uri: https://petstore.swagger.io/v2/pet/getPetByName/{petName} + catch: + errors: + with: + type: https://serverlessworkflow.io/dsl/errors/types/communication + status: 503 + as: err + do: + - setError: + set: + error: ${ $err } """ And given the workflow input is: """yaml diff --git a/dsl-reference.md b/dsl-reference.md index f1b9ec59..9229708f 100644 --- a/dsl-reference.md +++ b/dsl-reference.md @@ -14,7 +14,8 @@ + [gRPC](#grpc-call) + [HTTP](#http-call) + [OpenAPI](#openapi-call) - - [Composite](#composite) + - [Do](#do) + - [Fork](#fork) - [Emit](#emit) - [For](#for) - [Listen](#listen) @@ -44,7 +45,7 @@ + [Retry](#retry) + [Input](#input) + [Output](#output) - + [Export] (#export) + + [Export](#export) + [Timeout](#timeout) + [Duration](#duration) + [HTTP Response](#http-response) @@ -67,7 +68,7 @@ A [workflow](#workflow) serves as a blueprint outlining the series of [tasks](#t | document | [`document`](#document) | `yes` | Documents the defined workflow. | | input | [`input`](#input) | `no` | Configures the workflow's input. | | use | [`use`](#use) | `no` | Defines the workflow's reusable components, if any. | -| do | [`task`](#task) | `yes` | The [task](#task) that must be performed by the [workflow](#workflow). | +| do | [`map[string, task][]`](#task) | `yes` | The [task(s)](#task) that must be performed by the [workflow](#workflow). | | timeout | [`timeout`](#timeout) | `no` | The configuration, if any, of the workflow's timeout. | | output | [`output`](#output) | `no` | Configures the workflow's output. | | schedule | [`schedule`](#schedule) | `no` | Configures the workflow's schedule, if any. | @@ -130,79 +131,86 @@ document: version: '1.0.0' title: Order Pet - 1.0.0 summary: > - # Order Pet - 1.0.0 - ## Table of Contents - - [Description](#description) - - [Requirements](#requirements) - ## Description - A sample workflow used to process an hypothetic pet order using the [PetStore API](https://petstore.swagger.io/) - ## Requirements - ### Secrets - - my-oauth2-secret + # Order Pet - 1.0.0 + ## Table of Contents + - [Description](#description) + - [Requirements](#requirements) + ## Description + A sample workflow used to process an hypothetic pet order using the [PetStore API](https://petstore.swagger.io/) + ## Requirements + ### Secrets + - my-oauth2-secret use: authentications: petStoreOAuth2: - oauth2: my-oauth2-secret + oauth2: + authority: https://petstore.swagger.io/.well-known/openid-configuration + grant: client-credentials + client: + id: workflow-runtime + secret: "**********" + scopes: [ api ] + audiences: [ runtime ] extensions: - externalLogging: extend: all before: - call: http - with: - method: post - endpoint: https://fake.log.collector.com - body: - message: "${ \"Executing task '\($task.reference)'...\" }" + - sendLog: + call: http + with: + method: post + endpoint: https://fake.log.collector.com + body: + message: ${ "Executing task '\($task.reference)'..." 
} after: - call: http - with: - method: post - endpoint: https://fake.log.collector.com - body: - message: "${ \"Executed task '\($task.reference)'...\" }" + - sendLog: + call: http + with: + method: post + endpoint: https://fake.log.collector.com + body: + message: ${ "Executed task '\($task.reference)'..." } functions: getAvailablePets: call: openapi with: document: uri: https://petstore.swagger.io/v2/swagger.json - operation: findByStatus + operationId: findByStatus parameters: status: available secrets: - my-oauth2-secret do: - execute: - sequentially: - - getAvailablePets: - call: getAvailablePets - output: - as: "$input + { availablePets: [.[] | select(.category.name == "dog" and (.tags[] | .breed == $input.order.breed))] }" - - submitMatchesByMail: - call: http - with: - method: post - endpoint: - uri: https://fake.smtp.service.com/email/send - authentication: petStoreOAuth2 - body: - from: noreply@fake.petstore.com - to: ${ .order.client.email } - subject: Candidates for Adoption - body: > - Hello ${ .order.client.preferredDisplayName }! + - getAvailablePets: + call: getAvailablePets + output: + as: "$input + { availablePets: [.[] | select(.category.name == \"dog\" and (.tags[] | .breed == $input.order.breed))] }" + - submitMatchesByMail: + call: http + with: + method: post + endpoint: + uri: https://fake.smtp.service.com/email/send + authentication: petStoreOAuth2 + body: + from: noreply@fake.petstore.com + to: ${ .order.client.email } + subject: Candidates for Adoption + body: > + Hello ${ .order.client.preferredDisplayName }! - Following your interest to adopt a dog, here is a list of candidates that you might be interested in: + Following your interest to adopt a dog, here is a list of candidates that you might be interested in: - ${ .pets | map("-\(.name)") | join("\n") } + ${ .pets | map("-\(.name)") | join("\n") } - Please do not hesistate to contact us at info@fake.petstore.com if your have questions. + Please do not hesistate to contact us at info@fake.petstore.com if your have questions. - Hope to hear from you soon! + Hope to hear from you soon! - ---------------------------------------------------------------------------------------------- - DO NOT REPLY - ---------------------------------------------------------------------------------------------- + ---------------------------------------------------------------------------------------------- + DO NOT REPLY + ---------------------------------------------------------------------------------------------- ``` ### Task @@ -218,7 +226,8 @@ By breaking down the [workflow](#workflow) into manageable [tasks](#task), organ The Serverless Workflow DSL defines a list of [tasks](#task) that **must be** supported by all runtimes: - [Call](#call), used to call services and/or functions. -- [Composite](#composite), used to define a minimum of two subtasks to perform. +- [Do](#do), used to define one or more subtasks to perform in sequence. +- [Fork](#fork), used to define one or more subtasks to perform concurrently. - [Emit](#emit), used to emit [events](#event). - [For](#for), used to iterate over a collection of items, and conditionally perform a task for each of them. - [Listen](#listen), used to listen for an [event](#event) or more. 
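For orientation, the list-based `do` syntax that these hunks migrate to looks like the following minimal sketch, adapted from `examples/call-http-shorthand-endpoint.yaml` in this change set; every entry under `do` is a map holding exactly one named task.

```yaml
document:
  dsl: 1.0.0-alpha1
  namespace: examples
  name: call-http-shorthand-endpoint
  version: 1.0.0-alpha1
do:
  - getPet:            # one named task per list item
      call: http
      with:
        method: get
        endpoint: https://petstore.swagger.io/v2/pet/{petId}
```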
@@ -256,13 +265,14 @@ Enables the execution of a specified function within a workflow, allowing seamle document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: call-example version: '0.1.0' do: - call: http - with: - method: get - endpoint: https://petstore.swagger.io/v2/pet/{petId} + - getPet: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} ``` Serverless Workflow defines several default functions that **MUST** be supported by all implementations and runtimes: @@ -294,18 +304,19 @@ The [AsyncAPI Call](#asyncapi-call) enables workflows to interact with external document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: asyncapi-example version: '0.1.0' do: - call: asyncapi - with: - document: https://fake.com/docs/asyncapi.json - operation: findPetsByStatus - server: staging - message: getPetByStatusQuery - binding: http - payload: - petId: ${ .pet.id } + - findPet: + call: asyncapi + with: + document: https://fake.com/docs/asyncapi.json + operationRef: findPetsByStatus + server: staging + message: getPetByStatusQuery + binding: http + payload: + petId: ${ .pet.id } ``` ##### gRPC Call @@ -330,19 +341,20 @@ The [gRPC Call](#grpc-call) enables communication with external systems via the document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: grpc-example version: '0.1.0' do: - call: grpc - with: - proto: file://app/greet.proto - service: - name: GreeterApi.Greeter - host: localhost - port: 5011 - method: SayHello - arguments: - name: ${ .user.preferredDisplayName } + - greet: + call: grpc + with: + proto: file://app/greet.proto + service: + name: GreeterApi.Greeter + host: localhost + port: 5011 + method: SayHello + arguments: + name: ${ .user.preferredDisplayName } ``` ##### HTTP Call @@ -365,13 +377,14 @@ The [HTTP Call](#http-call) enables workflows to interact with external services document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: http-example version: '0.1.0' do: - call: http - with: - method: get - endpoint: https://petstore.swagger.io/v2/pet/{petId} + - getPet: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} ``` ##### OpenAPI Call @@ -394,102 +407,112 @@ The [OpenAPI Call](#openapi-call) enables workflows to interact with external se document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: openapi-example version: '0.1.0' do: - call: openapi - with: - document: https://petstore.swagger.io/v2/swagger.json - operation: findPetsByStatus - parameters: - status: available + - findPet: + call: openapi + with: + document: https://petstore.swagger.io/v2/swagger.json + operationId: findPetsByStatus + parameters: + status: available ``` -#### Composite +#### Do - Serves as a pivotal orchestrator within workflow systems, enabling the seamless integration and execution of multiple subtasks to accomplish complex operations. By encapsulating and coordinating various subtasks, this task type facilitates the efficient execution of intricate workflows. +Serves as a fundamental building block within workflows, enabling the sequential execution of multiple subtasks. By defining a series of subtasks to perform in sequence, the Do task facilitates the efficient execution of complex operations, ensuring that each subtask is completed before the next one begins. 
 ##### Properties
 
 | Name | Type | Required | Description|
 |:--|:---:|:---:|:---|
-| execute.sequentially | [`map[string, task][]`](#task) | `no` | The tasks to perform sequentially.<br>*Required if `execute.concurrently` has not been set, otherwise ignored.*<br>*If set, must contains **at least** two [`tasks`](#task).* |
-| execute.concurrently | [`map[string, task][]`](#task) | `no` | The tasks to perform concurrently.<br>*Required if `execute.sequentially` has not been set, otherwise ignored.*<br>*If set, must contains **at least** two [`tasks`](#task).* |
-| execute.compete | `boolean` | `no` | Indicates whether or not the concurrent [`tasks`](#task) are racing against each other, with a single possible winner, which sets the composite task's output.<br>*Ignored if `execute.sequentially` has been set. Defaults to `false`.*<br>
*Must **not** be set if the [`tasks`](#task) are executed sequentially.* | +| do | [`map[string, task][]`](#task) | `no` | The tasks to perform sequentially. | ##### Examples -*Executing tasks sequentially:* ```yaml document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: do-example version: '0.1.0' do: - execute: - sequentially: - - bookHotel: - call: http - with: - method: post - endpoint: - uri: https://fake-booking-agency.com/hotels/book - authentication: fake-booking-agency-oauth2 - body: - name: Four Seasons - city: Antwerp + - bookHotel: + call: http + with: + method: post + endpoint: + uri: https://fake-booking-agency.com/hotels/book + authentication: fake-booking-agency-oauth2 + body: + name: Four Seasons + city: Antwerp + country: Belgium + - bookFlight: + call: http + with: + method: post + endpoint: + uri: https://fake-booking-agency.com/flights/book + authentication: fake-booking-agency-oauth2 + body: + departure: + date: '01/01/26' + time: '07:25:00' + from: + airport: BRU + city: Zaventem country: Belgium - - bookFlight: - call: http - with: - method: post - endpoint: - uri: https://fake-booking-agency.com/flights/book - authentication: fake-booking-agency-oauth2 - body: - departure: - date: '01/01/26' - time: '07:25:00' - from: - airport: BRU - city: Zaventem - country: Belgium - arrival: - date: '01/01/26' - time: '11:12:00' - to: - airport: LIS - city: Lisbon - country: Portugal + arrival: + date: '01/01/26' + time: '11:12:00' + to: + airport: LIS + city: Lisbon + country: Portugal ``` -*Executing tasks concurrently:* +#### Fork + +Allows workflows to execute multiple subtasks concurrently, enabling parallel processing and improving the overall efficiency of the workflow. By defining a set of subtasks to perform concurrently, the Fork task facilitates the execution of complex operations in parallel, ensuring that multiple tasks can be executed simultaneously. + +##### Properties + +| Name | Type | Required | Description| +|:--|:---:|:---:|:---| +| fork.branches | [`map[string, task][]`](#task) | `no` | The tasks to perform concurrently. | +| fork.compete | `boolean` | `no` | Indicates whether or not the concurrent [`tasks`](#task) are racing against each other, with a single possible winner, which sets the composite task's output. Defaults to `false`. 
| + +##### Examples + ```yaml document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: fork-example version: '0.1.0' do: - execute: - concurrently: - - callNurse: - call: http - with: - method: put - endpoint: https://fake-hospital.com/api/v3/alert/nurses - body: - patientId: ${ .patient.fullName } - room: ${ .room.number } - - callDoctor: - call: http - with: - method: put - endpoint: https://fake-hospital.com/api/v3/alert/doctor - body: - patientId: ${ .patient.fullName } - room: ${ .room.number } + - raiseAlarm: + fork: + compete: true + branches: + - callNurse: + call: http + with: + method: put + endpoint: https://fake-hospital.com/api/v3/alert/nurses + body: + patientId: ${ .patient.fullName } + room: ${ .room.number } + - callDoctor: + call: http + with: + method: put + endpoint: https://fake-hospital.com/api/v3/alert/doctor + body: + patientId: ${ .patient.fullName } + room: ${ .room.number } ``` #### Emit @@ -508,21 +531,21 @@ Allows workflows to publish events to event brokers or messaging systems, facili document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: emit-example version: '0.1.0' do: - emit: - event: - with: - source: https://petstore.com - type: com.petstore.order.placed.v1 - data: - client: - firstName: Cruella - lastName: de Vil - items: - - breed: dalmatian - quantity: 101 + - emitEvent: + emit: + event: + source: https://petstore.com + type: com.petstore.order.placed.v1 + data: + client: + firstName: Cruella + lastName: de Vil + items: + - breed: dalmatian + quantity: 101 ``` #### For @@ -545,22 +568,24 @@ Allows workflows to iterate over a collection of items, executing a defined set document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: for-example version: '0.1.0' do: - for: - each: pet - in: .pets - at: index - while: .vet != null - do: - listen: - to: - one: - with: - type: com.fake.petclinic.pets.checkup.completed.v2 - output: - as: '.pets + [{ "id": $pet.id }]' + - checkup: + for: + each: pet + in: .pets + at: index + while: .vet != null + do: + - waitForCheckup: + listen: + to: + one: + with: + type: com.fake.petclinic.pets.checkup.completed.v2 + output: + as: '.pets + [{ "id": $pet.id }]' ``` #### Listen @@ -579,20 +604,21 @@ Provides a mechanism for workflows to await and react to external events, enabli document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: listen-example version: '0.1.0' do: - listen: - to: - any: - - with: - type: com.fake-hospital.vitals.measurements.temperature - data: - temperature: ${ .temperature > 38 } - - with: - type: com.fake-hospital.vitals.measurements.bpm - data: - temperature: ${ .bpm < 60 or .bpm > 100 } + - callDoctor: + listen: + to: + any: + - with: + type: com.fake-hospital.vitals.measurements.temperature + data: + temperature: ${ .temperature > 38 } + - with: + type: com.fake-hospital.vitals.measurements.bpm + data: + temperature: ${ .bpm < 60 or .bpm > 100 } ``` #### Raise @@ -611,33 +637,50 @@ Intentionally triggers and propagates errors. 
By employing the "Raise" task, wor document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: raise-example version: '0.1.0' do: - execute: - sequentially: - - processTicket: - switch: - - highPriority: - when: .ticket.priority == "high" - then: escalateToManager - - mediumPriority: - when: .ticket.priority == "medium" - then: assignToSpecialist - - lowPriority: - when: .ticket.priority == "low" - then: resolveTicket - - default: - then: raiseUndefinedPriorityError - - raiseUndefinedPriorityError: - raise: - error: - type: https://fake.com/errors/tickets/undefined-priority - status: 400 - title: Undefined Priority - - escalateToManager: {} - - assignToSpecialist: {} - - resolveTicket: {} + - processTicket: + switch: + - highPriority: + when: .ticket.priority == "high" + then: escalateToManager + - mediumPriority: + when: .ticket.priority == "medium" + then: assignToSpecialist + - lowPriority: + when: .ticket.priority == "low" + then: resolveTicket + - default: + then: raiseUndefinedPriorityError + - raiseUndefinedPriorityError: + raise: + error: + type: https://fake.com/errors/tickets/undefined-priority + status: 400 + instance: /raiseUndefinedPriorityError + title: Undefined Priority + - escalateToManager: + call: http + with: + method: post + endpoint: https://fake-ticketing-system.com/tickets/escalate + body: + ticketId: ${ .ticket.id } + - assignToSpecialist: + call: http + with: + method: post + endpoint: https://fake-ticketing-system.com/tickets/assign + body: + ticketId: ${ .ticket.id } + - resolveTicket: + call: http + with: + method: post + endpoint: https://fake-ticketing-system.com/tickets/resolve + body: + ticketId: ${ .ticket.id } ``` #### Run @@ -659,33 +702,33 @@ Provides the capability to execute external [containers](#container-process), [s document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: run-example version: '0.1.0' do: - execute: - sequentially: - - runContainer: - run: - container: - image: fake-image - - - runScript: - run: - script: - language: js - code: > - Some cool multiline script - - - runShell: - run: - shell: - command: 'echo "Hello, ${ .user.name }"' - - - runWorkflow: - run: - workflow: - reference: another-one:0.1.0 - input: {} + - runContainer: + run: + container: + image: fake-image + + - runScript: + run: + script: + language: js + code: > + Some cool multiline script + + - runShell: + run: + shell: + command: 'echo "Hello, ${ .user.name }"' + + - runWorkflow: + run: + workflow: + namespace: another-one + name: do-stuff + version: '0.1.0' + input: {} ``` ##### Container Process @@ -708,12 +751,13 @@ Enables the execution of external processes encapsulated within a containerized document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: run-container-example version: '0.1.0' do: - run: - container: - image: fake-image + - runContainer: + run: + container: + image: fake-image ``` ##### Script Process @@ -735,14 +779,15 @@ Enables the execution of custom scripts or code within a workflow, empowering wo document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: run-script-example version: '0.1.0' do: - run: - script: - language: js - code: > - Some cool multiline script + - runScript: + run: + script: + language: js + code: > + Some cool multiline script ``` ##### Shell Process @@ -763,12 +808,13 @@ Enables the execution of shell commands within a workflow, enabling workflows to document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: run-shell-example 
version: '0.1.0' do: - run: - shell: - command: 'echo "Hello, ${ .user.name }"' + - runShell: + run: + shell: + command: 'echo "Hello, ${ .user.name }"' ``` ##### Workflow Process @@ -789,14 +835,17 @@ Enables the invocation and execution of nested workflows within a parent workflo document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: run-workflow-example version: '0.1.0' do: - run: - workflow: - reference: another-one:0.1.0 - input: - foo: bar + - startWorkflow: + run: + workflow: + namespace: another-one + name: do-stuff + version: '0.1.0' + input: + foo: bar ``` #### Set @@ -815,13 +864,14 @@ A task used to set data. document: dsl: 1.0.0-alpha1 namespace: default - name: set + name: set-example version: '0.1.0' do: - set: - shape: circle - size: ${ .configuration.size } - fill: ${ .configuration.fill } + - setShape: + set: + shape: circle + size: ${ .configuration.size } + fill: ${ .configuration.fill } ``` #### Switch @@ -840,39 +890,62 @@ Enables conditional branching within workflows, allowing them to dynamically sel document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: switch-example version: '0.1.0' do: - execute: - sequentially: - - processOrder: - switch: - - case1: - when: .orderType == "electronic" - then: processElectronicOrder - - case2: - when: .orderType == "physical" - then: processPhysicalOrder - - default: - then: handleUnknownOrderType - - processElectronicOrder: - execute: - sequentially: - - validatePayment: {} - - fulfillOrder: {} - then: exit - - processPhysicalOrder: - execute: - sequentially: - - checkInventory: {} - - packItems: {} - - scheduleShipping: {} - then: exit - - handleUnknownOrderType: - execute: - sequentially: - - logWarning: {} - - notifyAdmin: {} + - processOrder: + switch: + - case1: + when: .orderType == "electronic" + then: processElectronicOrder + - case2: + when: .orderType == "physical" + then: processPhysicalOrder + - default: + then: handleUnknownOrderType + - processElectronicOrder: + do: + - validatePayment: + call: http + with: + method: post + endpoint: https://fake-payment-service.com/validate + - fulfillOrder: + call: http + with: + method: post + endpoint: https://fake-fulfillment-service.com/fulfill + then: exit + - processPhysicalOrder: + do: + - checkInventory: + call: http + with: + method: get + endpoint: https://fake-inventory-service.com/inventory + - packItems: + call: http + with: + method: post + endpoint: https://fake-packaging-service.com/pack + - scheduleShipping: + call: http + with: + method: post + endpoint: https://fake-shipping-service.com/schedule + then: exit + - handleUnknownOrderType: + do: + - logWarning: + call: http + with: + method: post + endpoint: https://fake-logging-service.com/warn + - notifyAdmin: + call: http + with: + method: post + endpoint: https://fake-notification-service.com/notify ``` ##### Switch Case @@ -892,7 +965,7 @@ Serves as a mechanism within workflows to handle errors gracefully, potentially | Name | Type | Required | Description| |:--|:---:|:---:|:---| -| try | [`task`](#task) | `yes` | The task(s) to perform. | +| try | [`map[string, task][]`](#task) | `yes` | The task(s) to perform. | | catch | [`catch`](#catch) | `yes` | Configures the errors to catch and how to handle them. 
| ##### Examples @@ -901,28 +974,30 @@ Serves as a mechanism within workflows to handle errors gracefully, potentially document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: try-example version: '0.1.0' do: - try: - call: http - with: - method: get - endpoint: https:// - catch: - errors: - with: - type: https://serverlessworkflow.io.io/dsl/errors/types/communication - status: 503 - as: error - retry: - delay: - seconds: 3 - backoff: - exponential: {} - limit: - attempt: - count: 5 + - trySomething: + try: + - invalidHttpCall: + call: http + with: + method: get + endpoint: https:// + catch: + errors: + with: + type: https://serverlessworkflow.io.io/dsl/errors/types/communication + status: 503 + as: error + retry: + delay: + seconds: 3 + backoff: + exponential: {} + limit: + attempt: + count: 5 ``` ##### Catch @@ -938,7 +1013,7 @@ Defines the configuration of a catch clause, which a concept used to catch error | when | `string`| `no` | A runtime expression used to determine whether or not to catch the filtered error | | exceptWhen | `string` | `no` | A runtime expression used to determine whether or not to catch the filtered error | | retry | [`retryPolicy`](#retry) | `no` | The retry policy to use, if any, when catching errors | -| do | [`task`](#task) | `no` | The definition of the task to run when catching an error | +| do | [`map[string, task][]`](#task) | `no` | The definition of the task(s) to run when catching an error | #### Wait @@ -956,11 +1031,12 @@ Allows workflows to pause or delay their execution for a specified period of tim document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: wait-example version: '0.1.0' do: - wait: - seconds: 10 + - waitAWhile: + wait: + seconds: 10 ``` ### Flow Directive @@ -1017,21 +1093,22 @@ Defines the mechanism used to authenticate users and workflows attempting to acc document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: authentication-example version: '0.1.0' use: secrets: - usernamePasswordSecret: {} + - usernamePasswordSecret authentication: sampleBasicFromSecret: basic: usernamePasswordSecret do: - call: http - with: - method: get - endpoint: - uri: https://secured.fake.com/sample - authentication: sampleBasicFromSecret + - sampleTask: + call: http + with: + method: get + endpoint: + uri: https://secured.fake.com/sample + authentication: sampleBasicFromSecret ``` #### Basic Authentication @@ -1051,7 +1128,7 @@ Defines the fundamentals of a 'basic' authentication. 
document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: basic-authentication-example version: '0.1.0' use: authentication: @@ -1060,12 +1137,13 @@ use: username: admin password: 123 do: - call: http - with: - method: get - endpoint: - uri: https://secured.fake.com/sample - authentication: sampleBasic + - sampleTask: + call: http + with: + method: get + endpoint: + uri: https://secured.fake.com/sample + authentication: sampleBasic ``` #### Bearer Authentication @@ -1084,17 +1162,18 @@ Defines the fundamentals of a 'bearer' authentication document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: bearer-authentication-example version: '0.1.0' do: - call: http - with: - method: get - endpoint: - uri: https://secured.fake.com/sample - authentication: - bearer: - token: ${ .user.token } + - sampleTask: + call: http + with: + method: get + endpoint: + uri: https://secured.fake.com/sample + authentication: + bearer: + token: ${ .user.token } ``` #### Certificate Authentication @@ -1128,23 +1207,24 @@ Defines the fundamentals of an 'oauth2' authentication document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: oauth2-authentication-example version: '0.1.0' do: - call: http - with: - method: get - endpoint: - uri: https://secured.fake.com/sample - authentication: - oauth2: - authority: http://keycloak/realms/fake-authority/.well-known/openid-configuration - grant: client-credentials - client: - id: workflow-runtime - secret: ********** - scopes: [ api ] - audiences: [ runtime ] + - sampleTask: + call: http + with: + method: get + endpoint: + uri: https://secured.fake.com/sample + authentication: + oauth2: + authority: http://keycloak/realms/fake-authority/.well-known/openid-configuration + grant: client-credentials + client: + id: workflow-runtime + secret: "**********" + scopes: [ api ] + audiences: [ runtime ] ``` ##### OAUTH2 Token @@ -1170,8 +1250,8 @@ Extensions enable the execution of tasks prior to those they extend, offering th |----------|:----:|:--------:|-------------| | extend | `string` | `yes` | The type of task to extend
Supported values are: `call`, `composite`, `emit`, `extension`, `for`, `listen`, `raise`, `run`, `set`, `switch`, `try`, `wait` and `all` | | when | `string` | `no` | A runtime expression used to determine whether or not the extension should apply in the specified context | -| before | [`task`](#task) | `no` | The task to execute, if any, before the extended task | -| after | [`task`](#task) | `no` | The task to execute, if any, after the extended task | +| before | [`map[string, task][]`](#task) | `no` | The task to execute, if any, before the extended task | +| after | [`map[string, task][]`](#task) | `no` | The task to execute, if any, after the extended task | #### Examples @@ -1180,31 +1260,34 @@ Extensions enable the execution of tasks prior to those they extend, offering th document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: logging-extension-example version: '0.1.0' use: extensions: - logging: extend: all before: - call: http - with: - method: post - endpoint: https://fake.log.collector.com - body: - message: "${ \"Executing task '\($task.reference)'...\" }" + - sendLog: + call: http + with: + method: post + endpoint: https://fake.log.collector.com + body: + message: ${ "Executing task '\($task.reference)'..." } after: - call: http - with: - method: post - endpoint: https://fake.log.collector.com - body: - message: "${ \"Executed task '\($task.reference)'...\" }" + - sendLog: + call: http + with: + method: post + endpoint: https://fake.log.collector.com + body: + message: ${ "Executed task '\($task.reference)'..." } do: - call: http - with: - method: get - endpoint: https://fake.com/sample + - sampleTask: + call: http + with: + method: get + endpoint: https://fake.com/sample ``` *Intercept HTTP calls to 'https://mocked.service.com' and mock its response:* @@ -1212,27 +1295,29 @@ do: document: dsl: '1.0.0-alpha1' namespace: test - name: sample-workflow + name: intercept-extension-example version: '0.1.0' use: extensions: - mockService: - extend: http - when: ($task.with.uri != null and ($task.with.uri | startswith("https://mocked.service.com"))) or ($task.with.endpoint.uri != null and ($task.with.endpoint.uri | startswith("https://mocked.service.com"))) + extend: call + when: $task.call == "http" and ($task.with.uri != null and ($task.with.uri | startswith("https://mocked.service.com"))) or ($task.with.endpoint.uri != null and ($task.with.endpoint.uri | startswith("https://mocked.service.com"))) before: - set: - statusCode: 200 - headers: - Content-Type: application/json - content: - foo: - bar: baz - then: exit #using this, we indicate to the workflow we want to exit the extended task, thus just returning what we injected + - intercept: + set: + statusCode: 200 + headers: + Content-Type: application/json + content: + foo: + bar: baz + then: exit #using this, we indicate to the workflow we want to exit the extended task, thus just returning what we injected do: - call: http - with: - method: get - endpoint: https://fake.com/sample + - sampleTask: + call: http + with: + method: get + endpoint: https://fake.com/sample ``` ### Error @@ -1497,11 +1582,12 @@ Defines a workflow or task timeout. 
@@ -1497,11 +1582,12 @@ Defines a workflow or task timeout.
 document:
   dsl: '1.0.0-alpha1'
   namespace: default
-  name: sample
+  name: timeout-example
   version: '0.1.0'
 do:
-  wait:
-    seconds: 60
+  - waitAMinute:
+      wait:
+        seconds: 60
 timeout:
   after:
     seconds: 30
diff --git a/dsl.md b/dsl.md
index b481d9c5..bc1311ac 100644
--- a/dsl.md
+++ b/dsl.md
@@ -101,7 +101,8 @@ Serverless Workflow DSL allows for defining reusable components that can be refe
 The Serverless Workflow DSL defines several default [task](dsl-reference.md#tasks) types that runtimes **must** implement:

 - [Call](dsl-reference.md#call), used to call services and/or functions.
-- [Composite](dsl-reference.md#composite), used to define a minimum of two subtasks to perform.
+- [Do](dsl-reference.md#do), used to define one or more subtasks to perform in sequence.
+- [Fork](dsl-reference.md#fork), used to define two or more subtasks to perform in parallel.
 - [Emit](dsl-reference.md#emit), used to emit [events](dsl-reference.md#event).
 - [For](dsl-reference.md#for), used to iterate over a collection of items, and conditionally perform a task for each of them.
 - [Listen](dsl-reference.md#listen), used to listen for an [event](dsl-reference.md#event) or more.
@@ -342,9 +343,10 @@ document:
   version: '0.1.0'
 do:
-  call: https://github.com/myorg/functions/validateEmailAddress@v1
-  with:
-    emailAddress: ${ .userEmail }
+  - validateEmail:
+      call: https://github.com/myorg/functions/validateEmailAddress@v1
+      with:
+        emailAddress: ${ .userEmail }
 ```

 ##### Publishing a Custom Function
@@ -411,24 +413,27 @@ use:
     logging:
       extend: all
       before:
-        call: http
-        with:
-          method: post
-          uri: https://fake.log.collector.com
-          body:
-            message: "${ \"Executing task '\($task.reference)'...\" }"
+        - sendLog:
+            call: http
+            with:
+              method: post
+              uri: https://fake.log.collector.com
+              body:
+                message: "${ \"Executing task '\($task.reference)'...\" }"
       after:
-        call: http
-        with:
-          method: post
-          uri: https://fake.log.collector.com
-          body:
-            message: "${ \"Executed task '\($task.reference)'...\" }"
+        - sendLog:
+            call: http
+            with:
+              method: post
+              uri: https://fake.log.collector.com
+              body:
+                message: "${ \"Executed task '\($task.reference)'...\" }"
 do:
-  call: http
-  with:
-    method: get
-    uri: https://fake.com/sample
+  - sampleTask:
+      call: http
+      with:
+        method: get
+        uri: https://fake.com/sample
 ```

 ### External Resources
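Since the single `Composite` task is split into `Do` and `Fork` above, a short side-by-side sketch may help. It is illustrative only (task names and URIs are invented) and assumes the list-based `do` syntax introduced in this change:

```yaml
do:
  # Do: the named subtasks run one after the other
  - fetchUser:
      call: http
      with:
        method: get
        endpoint: https://fake.com/users/1
  # Fork: the branches run in parallel; 'compete: true' would keep only the fastest result
  - enrichUser:
      fork:
        compete: false
        branches:
          - fetchOrders:
              call: http
              with:
                method: get
                endpoint: https://fake.com/orders/1
          - fetchPayments:
              call: http
              with:
                method: get
                endpoint: https://fake.com/payments/1
```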
diff --git a/examples/accumulate-room-readings.yaml b/examples/accumulate-room-readings.yaml
index f4f8e678..09bdf4d3 100644
--- a/examples/accumulate-room-readings.yaml
+++ b/examples/accumulate-room-readings.yaml
@@ -4,45 +4,43 @@ document:
   name: accumulate-room-readings
   version: 1.0.0-alpha1
 do:
-  execute:
-    sequentially:
-      - consumeReading:
-          listen:
-            to:
-              all:
-                - with:
-                    source: https://my.home.com/sensor
-                    type: my.home.sensors.temperature
-                  correlate:
-                    roomId:
-                      from: .roomid
-                  output:
-                    as: .data.reading
-                - with:
-                    source: https://my.home.com/sensor
-                    type: my.home.sensors.humidity
-                  correlate:
-                    roomId:
-                      from: .roomid
-                  output:
-                    as: .data.reading
-          as: readings
-      - logReading:
-          for:
-            each: reading
-            in: .readings
-          do:
+  - consumeReading:
+      listen:
+        to:
+          all:
+            - with:
+                source: https://my.home.com/sensor
+                type: my.home.sensors.temperature
+              correlate:
+                roomId:
+                  from: .roomid
+              output:
+                as: .data.reading
+            - with:
+                source: https://my.home.com/sensor
+                type: my.home.sensors.humidity
+              correlate:
+                roomId:
+                  from: .roomid
+              output:
+                as: .data.reading
+  - logReading:
+      for:
+        each: reading
+        in: .readings
+      do:
+        - callOrderService:
            call: openapi
            with:
              document:
                uri: http://myorg.io/ordersservices.json
              operationId: logreading
-      - generateReport:
-          call: openapi
-          with:
-            document:
-              uri: http://myorg.io/ordersservices.json
-            operationId: produceReport
+  - generateReport:
+      call: openapi
+      with:
+        document:
+          uri: http://myorg.io/ordersservices.json
+        operationId: produceReport
 timeout:
   after:
-    hours: 1
\ No newline at end of file
+    hours: 1
diff --git a/examples/asyncapi.yaml b/examples/asyncapi.yaml
index ca6ff26d..179a77af 100644
--- a/examples/asyncapi.yaml
+++ b/examples/asyncapi.yaml
@@ -4,16 +4,17 @@ document:
   name: bearer-auth
   version: 1.0.0-alpha1
 do:
-  call: asyncapi
-  with:
-    document:
-      uri: https://fake.com/docs/asyncapi.json
-    operationRef: findPetsByStatus
-    server: staging
-    message: getPetByStatusQuery
-    binding: http
-    payload:
-      petId: ${ .pet.id }
-    authentication:
-      bearer:
-        token: ${ .token }
+  - findPet:
+      call: asyncapi
+      with:
+        document:
+          uri: https://fake.com/docs/asyncapi.json
+        operationRef: findPetsByStatus
+        server: staging
+        message: getPetByStatusQuery
+        binding: http
+        payload:
+          petId: ${ .pet.id }
+        authentication:
+          bearer:
+            token: ${ .token }
diff --git a/examples/bearer-auth-uri-format.yaml b/examples/bearer-auth-uri-format.yaml
index 2a0700ec..44149c0b 100644
--- a/examples/bearer-auth-uri-format.yaml
+++ b/examples/bearer-auth-uri-format.yaml
@@ -4,11 +4,12 @@ document:
   name: bearer-auth-uri-format
   version: 1.0.0-alpha1
 do:
-  call: http
-  with:
-    method: get
-    endpoint:
-      uri: https://petstore.swagger.io/v2/pet/1
-    authentication:
-      bearer:
-        token: ${ .token }
+  - getPet:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://petstore.swagger.io/v2/pet/1
+        authentication:
+          bearer:
+            token: ${ .token }
diff --git a/examples/bearer-auth.yaml b/examples/bearer-auth.yaml
index 46e72bf1..03f6c883 100644
--- a/examples/bearer-auth.yaml
+++ b/examples/bearer-auth.yaml
@@ -4,11 +4,12 @@ document:
   name: bearer-auth
   version: 1.0.0-alpha1
 do:
-  call: http
-  with:
-    method: get
-    endpoint:
-      uri: https://petstore.swagger.io/v2/pet/{petId}
-    authentication:
-      bearer:
-        token: ${ .token }
+  - getPet:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://petstore.swagger.io/v2/pet/{petId}
+        authentication:
+          bearer:
+            token: ${ .token }
diff --git a/examples/call-http-shorthand-endpoint.yaml b/examples/call-http-shorthand-endpoint.yaml
index 52b606b2..9be80066 100644
--- a/examples/call-http-shorthand-endpoint.yaml
+++ b/examples/call-http-shorthand-endpoint.yaml
@@ -4,7 +4,8 @@ document:
   name: call-http-shorthand-endpoint
   version: 1.0.0-alpha1
 do:
-  call: http
-  with:
-    method: get
-    endpoint: https://petstore.swagger.io/v2/pet/{petId}
+  - getPet:
+      call: http
+      with:
+        method: get
+        endpoint: https://petstore.swagger.io/v2/pet/{petId}
diff --git a/examples/do-single.yaml b/examples/do-single.yaml
index 52b606b2..9be80066 100644
--- a/examples/do-single.yaml
+++ b/examples/do-single.yaml
@@ -4,7 +4,8 @@ document:
   name: call-http-shorthand-endpoint
   version: 1.0.0-alpha1
 do:
-  call: http
-  with:
-    method: get
-    endpoint: https://petstore.swagger.io/v2/pet/{petId}
+  - getPet:
+      call: http
+      with:
+        method: get
+        endpoint: https://petstore.swagger.io/v2/pet/{petId}
diff --git a/examples/mock-service-extension.yaml b/examples/mock-service-extension.yaml
index 4a6c7724..9cbb0a1a 100644
--- a/examples/mock-service-extension.yaml
+++ b/examples/mock-service-extension.yaml
@@ -1,25 +1,27 @@
-document:
-  dsl: '1.0.0-alpha1'
+document:
+  dsl: 1.0.0-alpha1
   namespace: test
   name: sample-workflow
-  version: '0.1.0'
+  version: 0.1.0
 use:
   extensions:
     - mockService:
         extend: call
         when: ($task.with.endpoint != null and ($task.with.endpoint | startswith("https://mocked.service.com"))) or ($task.with.endpoint.uri != null and ($task.with.endpoint.uri | startswith("https://mocked.service.com")))
         before:
-          set:
-            statusCode: 200
-            headers:
-              Content-Type: application/json
-            content:
-              foo:
-                bar: baz
-          then: exit #using this, we indicate to the workflow we want to exit the extended task, thus just returning what we injected
+          - mockResponse:
+              set:
+                statusCode: 200
+                headers:
+                  Content-Type: application/json
+                content:
+                  foo:
+                    bar: baz
+              then: exit #using this, we indicate to the workflow we want to exit the extended task, thus just returning what we injected
 do:
-  call: http
-  with:
-    method: get
-    endpoint:
-      uri: https://fake.com/sample
\ No newline at end of file
+  - callHttp:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://fake.com/sample
diff --git a/examples/switch-then-string.yaml b/examples/switch-then-string.yaml
index 99c69ea1..881bab50 100644
--- a/examples/switch-then-string.yaml
+++ b/examples/switch-then-string.yaml
@@ -1,50 +1,45 @@
 document:
-  dsl: '1.0.0-alpha1'
+  dsl: 1.0.0-alpha1
   namespace: test
   name: sample-workflow
-  version: '0.1.0'
+  version: 0.1.0
 do:
-  execute:
-    sequentially:
-      - processOrder:
-          switch:
-            - case1:
-                when: .orderType == "electronic"
-                then: processElectronicOrder
-            - case2:
-                when: .orderType == "physical"
-                then: processPhysicalOrder
-            - default:
-                then: handleUnknownOrderType
-      - processElectronicOrder:
-          execute:
-            sequentially:
-              - validatePayment:
-                  set:
-                    validate: true
-              - fulfillOrder:
-                  set:
-                    status: fulfilled
-          then: exit
-      - processPhysicalOrder:
-          execute:
-            sequentially:
-              - checkInventory:
-                  set:
-                    inventory: clear
-              - packItems:
-                  set:
-                    items: 1
-              - scheduleShipping:
-                  set:
-                    address: Elmer St
-          then: exit
-      - handleUnknownOrderType:
-          execute:
-            sequentially:
-              - logWarning:
-                  set:
-                    log: warn
-              - notifyAdmin:
-                  set:
-                    message: something's wrong
\ No newline at end of file
+  - processOrder:
+      switch:
+        - case1:
+            when: .orderType == "electronic"
+            then: processElectronicOrder
+        - case2:
+            when: .orderType == "physical"
+            then: processPhysicalOrder
+        - default:
+            then: handleUnknownOrderType
+  - processElectronicOrder:
+      do:
+        - validatePayment:
+            set:
+              validate: true
+        - fulfillOrder:
+            set:
+              status: fulfilled
+      then: exit
+  - processPhysicalOrder:
+      do:
+        - checkInventory:
+            set:
+              inventory: clear
+        - packItems:
+            set:
+              items: 1
+        - scheduleShipping:
+            set:
+              address: Elmer St
+      then: exit
+  - handleUnknownOrderType:
+      do:
+        - logWarning:
+            set:
+              log: warn
+        - notifyAdmin:
+            set:
+              message: something's wrong
diff --git a/examples/use-authentication.yaml b/examples/use-authentication.yaml
index 613764df..1b90d427 100644
--- a/examples/use-authentication.yaml
+++ b/examples/use-authentication.yaml
@@ -9,9 +9,10 @@ use:
       bearer:
         token: ${ .token }
 do:
-  call: http
-  with:
-    method: get
-    endpoint:
-      uri: https://petstore.swagger.io/v2/pet/{petId}
-    authentication: petStoreAuth
+  - getPet:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://petstore.swagger.io/v2/pet/{petId}
+        authentication: petStoreAuth
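The schema changes that follow move from draft-07 to JSON Schema 2020-12 and rely on `unevaluatedProperties` rather than `additionalProperties`: unlike `additionalProperties`, `unevaluatedProperties` also counts properties matched by composed subschemas such as `$ref: '#/$defs/taskBase'`, which is what lets every task type inherit the shared base properties while still rejecting unknown keys. A simplified sketch of the pattern, for illustration only (not the actual definitions):

```yaml
$defs:
  taskBase:
    type: object
    properties:
      then: { type: string }
  setTask:
    $ref: '#/$defs/taskBase'
    type: object
    unevaluatedProperties: false  # 'set' and the inherited 'then' pass; anything else is rejected
    required: [ set ]
    properties:
      set:
        type: object
```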
diff --git a/schema/workflow.yaml b/schema/workflow.yaml
index f03ebe18..81fd43d4 100644
--- a/schema/workflow.yaml
+++ b/schema/workflow.yaml
@@ -1,5 +1,5 @@
-$id: https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.json
-$schema: http://json-schema.org/draft-07/schema
+$id: https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.yaml
+$schema: https://json-schema.org/draft/2020-12/schema
 description: Serverless Workflow DSL - Workflow Schema
 type: object
 properties:
@@ -76,8 +76,8 @@ properties:
         description: The workflow's secrets.
     description: Defines the workflow's reusable components.
   do:
-    description: Defines the task the workflow must perform
-    $ref: '#/$defs/task'
+    description: Defines the task(s) the workflow must perform
+    $ref: '#/$defs/taskList'
   timeout:
     $ref: '#/$defs/timeout'
     description: The workflow's timeout configuration, if any.
@@ -101,7 +101,15 @@ properties:
         description: Specifies the events that trigger the workflow execution.
     description: Schedules the workflow
 $defs:
-  task:
+  taskList:
+    type: array
+    items:
+      type: object
+      minProperties: 1
+      maxProperties: 1
+      additionalProperties:
+        $ref: '#/$defs/task'
+  taskBase:
     type: object
     properties:
       input:
@@ -119,9 +127,12 @@ $defs:
       then:
         $ref: '#/$defs/flowDirective'
         description: The flow directive to be performed upon completion of the task.
+  task:
+    unevaluatedProperties: false
     oneOf:
       - $ref: '#/$defs/callTask'
-      - $ref: '#/$defs/compositeTask'
+      - $ref: '#/$defs/doTask'
+      - $ref: '#/$defs/forkTask'
       - $ref: '#/$defs/emitTask'
       - $ref: '#/$defs/forTask'
       - $ref: '#/$defs/listenTask'
@@ -132,9 +143,13 @@ $defs:
       - $ref: '#/$defs/tryTask'
       - $ref: '#/$defs/waitTask'
   callTask:
-    type: object
     oneOf:
-      - properties:
+      - title: CallAsyncAPI
+        $ref: '#/$defs/taskBase'
+        type: object
+        required: [ call, with ]
+        unevaluatedProperties: false
+        properties:
           call:
             type: string
            const: asyncapi
@@ -165,9 +180,14 @@ $defs:
                  - $ref: '#/$defs/authenticationPolicy'
                  - type: string
            required: [ document, operationRef ]
+            additionalProperties: false
            description: Defines the AsyncAPI call to perform.
+      - title: CallGRPC
+        $ref: '#/$defs/taskBase'
+        type: object
+        unevaluatedProperties: false
        required: [ call, with ]
-      - properties:
+        properties:
          call:
            type: string
            const: grpc
@@ -206,9 +226,14 @@ $defs:
              additionalProperties: true
              description: The arguments, if any, to call the method with.
            required: [ proto, service, method ]
+            additionalProperties: false
            description: Defines the GRPC call to perform.
+      - title: CallHTTP
+        $ref: '#/$defs/taskBase'
+        type: object
+        unevaluatedProperties: false
        required: [ call, with ]
-      - properties:
+        properties:
          call:
            type: string
            const: http
@@ -234,9 +259,14 @@ $defs:
              enum: [ raw, content, response ]
              description: The http call output format. Defaults to 'content'.
            required: [ method, endpoint ]
+            additionalProperties: false
            description: Defines the HTTP call to perform.
+      - title: CallOpenAPI
+        $ref: '#/$defs/taskBase'
+        type: object
+        unevaluatedProperties: false
        required: [ call, with ]
-      - properties:
+        properties:
          call:
            type: string
            const: openapi
@@ -263,9 +293,14 @@ $defs:
              enum: [ raw, content, response ]
              description: The http call output format. Defaults to 'content'.
            required: [ document, operationId ]
+            additionalProperties: false
            description: Defines the OpenAPI call to perform.
-        required: [ call, with ]
-      - properties:
+      - title: CallFunction
+        $ref: '#/$defs/taskBase'
+        type: object
+        unevaluatedProperties: false
+        required: [ call ]
+        properties:
          call:
            type: string
            not:
@@ -275,46 +310,38 @@ $defs:
            type: object
            additionalProperties: true
            description: A name/value mapping of the parameters, if any, to call the function with.
-        required: [ call ]
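Under the new `CallHTTP` shape, an instance combines the call-specific properties with the shared `taskBase` ones (such as `then`), while `unevaluatedProperties: false` rejects anything else. An illustrative instance, not taken from the diff:

```yaml
do:
  - getPet:
      call: http
      with:
        method: get
        endpoint: https://petstore.swagger.io/v2/pet/{petId}
      then: end        # allowed: inherited from taskBase
      # retries: 3     # would be rejected: neither CallHTTP nor taskBase defines it
```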
-  compositeTask:
-    type: object
-    required: [ execute ]
-    description: Serves as a pivotal orchestrator within workflow systems, enabling the seamless integration and execution of multiple subtasks to accomplish complex operations
+  forkTask:
+    description: Allows workflows to execute multiple tasks concurrently and optionally race them against each other, with a single possible winner, which sets the task's output.
+    $ref: '#/$defs/taskBase'
+    type: object
+    unevaluatedProperties: false
+    required: [ fork ]
     properties:
-      execute:
+      fork:
         type: object
-        description: Configures the task execution strategy to use
-        oneOf:
-          - required: [ concurrently ]
-            properties:
-              concurrently:
-                description: A list of the tasks to perform concurrently.
-                type: array
-                minItems: 2
-                items:
-                  type: object
-                  minProperties: 1
-                  maxProperties: 1
-                  additionalProperties:
-                    $ref: '#/$defs/task'
-              compete:
-                description: Indicates whether or not the concurrent tasks are racing against each other, with a single possible winner, which sets the composite task's output.
-                type: boolean
-                default: false
-          - required: [ sequentially ]
-            properties:
-              sequentially:
-                description: A list of the tasks to perform sequentially.
-                type: array
-                minItems: 2
-                items:
-                  type: object
-                  minProperties: 1
-                  maxProperties: 1
-                  additionalProperties:
-                    $ref: '#/$defs/task'
+        required: [ branches ]
+        properties:
+          branches:
+            $ref: '#/$defs/taskList'
+          compete:
+            description: Indicates whether or not the concurrent tasks are racing against each other, with a single possible winner, which sets the fork task's output.
+            type: boolean
+            default: false
+  doTask:
+    description: Allows the execution of a list of tasks in sequence
+    $ref: '#/$defs/taskBase'
+    type: object
+    unevaluatedProperties: false
+    required: [ do ]
+    properties:
+      do:
+        $ref: '#/$defs/taskList'
   emitTask:
+    description: Allows workflows to publish events to event brokers or messaging systems, facilitating communication and coordination between different components and services.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ emit ]
+    unevaluatedProperties: false
     properties:
       emit:
         type: object
@@ -346,17 +373,12 @@ $defs:
            required: [ source, type ]
            additionalProperties: true
        required: [ event ]
-    required: [ emit ]
-    description: Allows workflows to publish events to event brokers or messaging systems, facilitating communication and coordination between different components and services.
-  flowDirective:
-    additionalProperties: false
-    anyOf:
-      - type: string
-        enum: [ continue, exit, end ]
-        default: continue
-      - type: string
   forTask:
+    description: Allows workflows to iterate over a collection of items, executing a defined set of subtasks for each item in the collection. This task type is instrumental in handling scenarios such as batch processing, data transformation, and repetitive operations across datasets.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ for, do ]
+    unevaluatedProperties: false
     properties:
       for:
         type: object
@@ -377,11 +399,13 @@ $defs:
          type: string
          description: A runtime expression that represents the condition, if any, that must be met for the iteration to continue.
       do:
-        $ref: '#/$defs/task'
-    description: Allows workflows to iterate over a collection of items, executing a defined set of subtasks for each item in the collection. This task type is instrumental in handling scenarios such as batch processing, data transformation, and repetitive operations across datasets.
-    required: [ for, do ]
+        $ref: '#/$defs/taskList'
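With `for.do` now referencing `taskList`, each iteration can run several named subtasks in sequence. A sketch of a conforming instance (task names and endpoint invented for illustration):

```yaml
do:
  - notifyUsers:
      for:
        each: user
        in: .users
      do:
        - buildMessage:
            set:
              message: ${ "Hello \(.user.name)" }
        - sendMessage:
            call: http
            with:
              method: post
              endpoint: https://fake.notifications.com/send
```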
   listenTask:
+    description: Provides a mechanism for workflows to await and react to external events, enabling event-driven behavior within workflow systems.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ listen ]
+    unevaluatedProperties: false
     properties:
       listen:
         type: object
@@ -390,10 +414,12 @@ $defs:
            $ref: '#/$defs/eventConsumptionStrategy'
            description: Defines the event(s) to listen to.
        required: [ to ]
-    required: [ listen ]
-    description: Provides a mechanism for workflows to await and react to external events, enabling event-driven behavior within workflow systems.
   raiseTask:
+    description: Intentionally triggers and propagates errors.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ raise ]
+    unevaluatedProperties: false
     properties:
       raise:
         type: object
@@ -402,10 +428,12 @@ $defs:
            $ref: '#/$defs/error'
            description: Defines the error to raise.
        required: [ error ]
-    required: [ raise ]
-    description: Intentionally triggers and propagates errors.
   runTask:
+    description: Provides the capability to execute external containers, shell commands, scripts, or workflows.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ run ]
+    unevaluatedProperties: false
     properties:
       run:
         type: object
@@ -444,16 +472,16 @@ $defs:
                    additionalProperties: true
                    description: A key/value mapping of the environment variables, if any, to use when running the configured process.
                oneOf:
-                  - properties:
-                      code:
-                        type: string
-                    required: [ code ]
-                    description: The script's code.
-                  - properties:
-                      source:
-                        $ref: '#/$defs/externalResource'
-                        description: The script's resource.
-                    required: [ code ]
+                  - properties:
+                      code:
+                        type: string
+                    required: [ code ]
+                    description: The script's code.
+                  - properties:
+                      source:
+                        $ref: '#/$defs/externalResource'
+                        description: The script's resource.
+                    required: [ source ]
                required: [ language ]
            required: [ script ]
            description: Enables the execution of custom scripts or code within a workflow, empowering workflows to perform specialized logic, data processing, or integration tasks by executing user-defined scripts written in various programming languages.
@@ -496,20 +524,24 @@ $defs:
                required: [ namespace, name, version ]
            required: [ workflow ]
            description: Enables the invocation and execution of nested workflows within a parent workflow, facilitating modularization, reusability, and abstraction of complex logic or business processes by encapsulating them into standalone workflow units.
-    required: [ run ]
-    description: Provides the capability to execute external containers, shell commands, scripts, or workflows.
   setTask:
+    description: A task used to set data
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ set ]
+    unevaluatedProperties: false
     properties:
       set:
         type: object
         minProperties: 1
         additionalProperties: true
         description: The data to set
-    required: [ set ]
-    description: A task used to set data
   switchTask:
+    description: Enables conditional branching within workflows, allowing them to dynamically select different paths based on specified conditions or criteria
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ switch ]
+    unevaluatedProperties: false
     properties:
       switch:
         type: array
@@ -530,14 +562,16 @@ $defs:
            then:
              $ref: '#/$defs/flowDirective'
              description: The flow directive to execute when the case matches.
-    required: [ switch ]
-    description: Enables conditional branching within workflows, allowing them to dynamically select different paths based on specified conditions or criteria
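The `run.script` fix above makes the second variant require `source` instead of repeating `required: [ code ]`, so a script may now validly come from an external resource. The two accepted shapes, sketched for illustration (the `js` language tag and the URI are assumptions, not taken from the diff):

```yaml
do:
  - runInline:
      run:
        script:
          language: js
          code: 'console.log("hello")'
  - runFromFile:
      run:
        script:
          language: js
          source:
            uri: https://fake.com/scripts/hello.js
```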
   tryTask:
+    description: Serves as a mechanism within workflows to handle errors gracefully, potentially retrying failed tasks before proceeding with alternate ones.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ try, catch ]
+    unevaluatedProperties: false
     properties:
       try:
-        description: The task to perform.
-        $ref: '#/$defs/task'
+        description: The task(s) to perform.
+        $ref: '#/$defs/taskList'
       catch:
         type: object
         properties:
@@ -556,18 +590,25 @@ $defs:
            $ref: '#/$defs/retryPolicy'
            description: The retry policy to use, if any, when catching errors.
          do:
-            description: The definition of the task to run when catching an error.
-            $ref: '#/$defs/task'
-    required: [ try, catch ]
-    description: Serves as a mechanism within workflows to handle errors gracefully, potentially retrying failed tasks before proceeding with alternate ones.
+            description: The definition of the task(s) to run when catching an error.
+            $ref: '#/$defs/taskList'
   waitTask:
+    description: Allows workflows to pause or delay their execution for a specified period of time.
+    $ref: '#/$defs/taskBase'
     type: object
+    required: [ wait ]
+    unevaluatedProperties: false
     properties:
       wait:
-        $ref: '#/$defs/duration'
         description: The amount of time to wait.
-    required: [ wait ]
-    description: Allows workflows to pause or delay their execution for a specified period of time.
+        $ref: '#/$defs/duration'
+  flowDirective:
+    additionalProperties: false
+    anyOf:
+      - type: string
+        enum: [ continue, exit, end ]
+        default: continue
+      - type: string
   authenticationPolicy:
     type: object
     oneOf:
@@ -683,7 +724,7 @@ $defs:
          description: The status code generated by the origin for this occurrence of the error.
        instance:
          type: string
-          format: uri
+          format: json-pointer
          description: A JSON Pointer used to reference the component the error originates from.
        title:
          type: string
@@ -780,29 +821,32 @@ $defs:
        type: string
        description: A runtime expression, if any, used to determine whether or not the extension should apply in the specified context.
      before:
-        description: The task to execute before the extended task, if any.
-        $ref: '#/$defs/task'
+        description: The task(s) to execute before the extended task, if any.
+        $ref: '#/$defs/taskList'
      after:
-        description: The task to execute after the extended task, if any.
-        $ref: '#/$defs/task'
+        description: The task(s) to execute after the extended task, if any.
+        $ref: '#/$defs/taskList'
    required: [ extend ]
    description: The definition of an extension.
   externalResource:
-    type: object
-    properties:
-      uri:
-        type: string
+    oneOf:
+      - type: string
         format: uri
-        description: The endpoint's URI.
-      authentication:
-        description: The authentication policy to use.
-        oneOf:
-          - $ref: '#/$defs/authenticationPolicy'
-          - type: string
-      name:
-        type: string
-        description: The external resource's name, if any.
-    required: [ uri ]
+      - type: object
+        properties:
+          uri:
+            type: string
+            format: uri
+            description: The endpoint's URI.
+          authentication:
+            description: The authentication policy to use.
+            oneOf:
+              - $ref: '#/$defs/authenticationPolicy'
+              - type: string
+          name:
+            type: string
+            description: The external resource's name, if any.
+        required: [ uri ]
   input:
     type: object
     properties: