From eadb677af89703f12444376e03bde8c4280d64ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Sat, 24 Aug 2024 16:50:36 -0300 Subject: [PATCH 1/6] proposal sdk-go to v1.0.0-alpha2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: André R. de Miranda --- Makefile | 3 +- builder/builder.go | 44 +- builder/builder_test.go | 120 - builder/call.go | 51 + builder/do.go | 59 + builder/document.go | 73 + model/common.go => builder/duration.go | 29 +- .../map.go | 25 +- .../use.go | 17 +- builder/wait.go | 44 + builder/workflow.go | 92 + example/example.go | 99 + .../auth.yaml => example/example1.yaml | 22 +- .../example2.yaml | 33 +- go.mod | 20 +- go.sum | 61 +- graph/graph.go | 226 ++ graph/graph_test.go | 127 + graph/marshal.go | 60 + graph/unmarshal.go | 167 + hack/conv/main.go | 2 +- internal/dsl/default.go | 70 + internal/dsl/dsl.go | 1041 ++++++ parser/parser.go => internal/load/load.go | 65 +- internal/util/path.go | 53 + model/doc.go => internal/util/webassembly.go | 13 +- .../validator/errors.go | 26 +- .../validator/integrity.go | 12 +- internal/validator/validator.go | 71 + kubernetes/k8s_workflow_integration.go | 3 +- model/action.go | 125 - model/action_data_filter.go | 50 - model/action_data_filter_test.go | 83 - model/action_test.go | 83 - model/action_validator.go | 58 - model/action_validator_test.go | 200 -- model/auth.go | 221 -- model/auth_test.go | 89 - model/auth_validator_test.go | 210 -- model/callback_state.go | 60 - model/callback_state_validator_test.go | 116 - model/delay_state.go | 33 - model/delay_state_test.go | 15 - model/delay_state_validator_test.go | 68 - model/event.go | 130 - model/event_data_filter.go | 46 - model/event_data_filter_test.go | 81 - model/event_state.go | 109 - model/event_state_test.go | 152 - model/event_state_validator.go | 39 - model/event_state_validator_test.go | 189 - model/event_test.go | 120 - model/event_validator.go | 40 - model/event_validator_test.go | 216 -- model/foreach_state.go | 108 - model/foreach_state_test.go | 70 - model/foreach_state_validator.go | 45 - model/foreach_state_validator_test.go | 121 - model/function.go | 95 - model/function_validator_test.go | 74 - model/inject_state.go | 48 - model/operation_state.go | 71 - model/operation_state_test.go | 72 - model/operation_state_validator_test.go | 121 - model/parallel_state.go | 123 - model/parallel_state_test.go | 67 - model/parallel_state_validator.go | 39 - model/parallel_state_validator_test.go | 252 -- model/retry.go | 57 - model/retry_test.go | 15 - model/retry_validator.go | 40 - model/retry_validator_test.go | 91 - model/sleep_state.go | 48 - model/sleep_state_test.go | 15 - model/sleep_state_validator_test.go | 95 - model/state_exec_timeout.go | 34 - model/state_exec_timeout_test.go | 113 - model/state_exec_timeout_validator_test.go | 95 - model/states.go | 281 -- model/states_validator.go | 51 - model/states_validator_test.go | 151 - model/switch_state.go | 150 - model/switch_state_test.go | 110 - model/switch_state_validator.go | 64 - model/switch_state_validator_test.go | 274 -- model/workflow.go | 591 ---- model/workflow_ref.go | 72 - model/workflow_ref_test.go | 105 - model/workflow_ref_validator_test.go | 68 - model/workflow_test.go | 677 ---- model/workflow_validator.go | 247 -- model/workflow_validator_test.go | 544 --- model/zz_generated.buildergen.go | 3139 ----------------- model/zz_generated.deepcopy.go | 1837 ---------- {model => object}/object.go | 16 +-
{model => object}/object_test.go | 4 +- parser/parser_test.go | 1102 ------ .../testdata/applicationrequestfunctions.json | 8 - .../testdata/applicationrequestretries.json | 9 - parser/testdata/constantsDogs.json | 9 - parser/testdata/datainputschema.json | 16 - parser/testdata/errors.json | 13 - parser/testdata/eventbasedgreetingevents.json | 9 - parser/testdata/eventdefs.yml | 22 - parser/testdata/functiondefs.json | 16 - parser/testdata/secrets.json | 6 - parser/testdata/timeouts.json | 6 - .../workflows/VetAppointmentWorkflow.json | 45 - .../workflows/actiondata-defaultvalue.yaml | 34 - .../applicationrequest-issue103.json | 79 - .../applicationrequest-issue16.sw.yaml | 48 - .../workflows/applicationrequest-issue69.json | 79 - .../workflows/applicationrequest.json | 75 - .../applicationrequest.multiauth.json | 104 - .../workflows/applicationrequest.rp.json | 69 - .../workflows/applicationrequest.url.json | 69 - parser/testdata/workflows/checkInbox.json | 53 - parser/testdata/workflows/checkcarvitals.json | 60 - .../workflows/checkinbox.cron-test.sw.yaml | 45 - parser/testdata/workflows/checkinbox.sw.yaml | 49 - parser/testdata/workflows/compensate.sw.json | 99 - .../workflows/conditionbasedstate.yaml | 40 - .../workflows/continue-as-example.yaml | 58 - .../customerbankingtransactions.json | 43 - .../workflows/customercreditcheck.json | 96 - .../workflows/dataInputSchemaObject.json | 56 - .../workflows/eventbaseddataandswitch.sw.json | 107 - .../workflows/eventbasedgreeting.sw.json | 52 - .../workflows/eventbasedgreeting.sw.p.json | 49 - .../eventbasedgreetingexclusive.sw.json | 83 - .../eventbasedgreetingnonexclusive.sw.json | 62 - .../workflows/eventbasedswitch.sw.json | 92 - .../workflows/eventbasedswitchstate.json | 70 - .../testdata/workflows/fillglassofwater.json | 48 - .../workflows/finalizeCollegeApplication.json | 74 - .../greetings-constants-file.sw.yaml | 40 - .../workflows/greetings-secret-file.sw.yaml | 40 - .../workflows/greetings-secret.sw.yaml | 41 - .../workflows/greetings-v08-spec.sw.yaml | 273 -- parser/testdata/workflows/greetings.sw.json | 34 - parser/testdata/workflows/greetings.sw.yaml | 40 - .../workflows/greetings_sleep.sw.json | 47 - .../workflows/handleCarAuctionBid.json | 49 - parser/testdata/workflows/helloworld.json | 18 - parser/testdata/workflows/jobmonitoring.json | 127 - .../testdata/workflows/onboardcustomer.json | 25 - parser/testdata/workflows/parallelexec.json | 34 - .../workflows/patientVitalsWorkflow.json | 110 - .../workflows/patientonboarding.sw.yaml | 64 - .../workflows/paymentconfirmation.json | 96 - .../workflows/provisionorders.sw.json | 100 - .../workflows/purchaseorderworkflow.sw.json | 162 - .../roomreadings.timeouts.file.sw.json | 80 - .../workflows/roomreadings.timeouts.sw.json | 88 - .../workflows/sendcloudeventonprovision.json | 47 - .../testdata/workflows/sendcustomeremail.json | 32 - .../testdata/workflows/solvemathproblems.json | 37 - parser/testdata/workflows/urifiles/auth.json | 17 - parser/testdata/workflows/vitalscheck.json | 53 - .../applicationrequest-issue44.json | 85 - .../applicationrequest-issue74.json | 82 - ...pplicationrequest.auth.invalid.format.json | 85 - .../applicationrequest.authdupl.json | 96 - util/floatstr/floatstr.go | 105 - util/floatstr/floatstr_test.go | 109 - util/unmarshal.go | 335 -- util/unmarshal_test.go | 290 -- validate/validator.go | 50 + .../validator_test.go | 30 +- validator/validator.go | 120 - validator/validator_test.go | 228 -- validator/workflow.go | 154 - 172 files changed, 2505 
insertions(+), 19523 deletions(-) delete mode 100644 builder/builder_test.go create mode 100644 builder/call.go create mode 100644 builder/do.go create mode 100644 builder/document.go rename model/common.go => builder/duration.go (53%) rename model/inject_state_validator_test.go => builder/map.go (55%) rename model/action_data_filter_validator_test.go => builder/use.go (67%) create mode 100644 builder/wait.go create mode 100644 builder/workflow.go create mode 100644 example/example.go rename parser/testdata/workflows/urifiles/auth.yaml => example/example1.yaml (64%) rename parser/testdata/workflows/dataInputSchemaValidation.yaml => example/example2.yaml (56%) create mode 100644 graph/graph.go create mode 100644 graph/graph_test.go create mode 100644 graph/marshal.go create mode 100644 graph/unmarshal.go create mode 100644 internal/dsl/default.go create mode 100644 internal/dsl/dsl.go rename parser/parser.go => internal/load/load.go (59%) create mode 100644 internal/util/path.go rename model/doc.go => internal/util/webassembly.go (75%) rename util/unmarshal_benchmark_test.go => internal/validator/errors.go (64%) rename validator/tags.go => internal/validator/integrity.go (75%) create mode 100644 internal/validator/validator.go delete mode 100644 model/action.go delete mode 100644 model/action_data_filter.go delete mode 100644 model/action_data_filter_test.go delete mode 100644 model/action_test.go delete mode 100644 model/action_validator.go delete mode 100644 model/action_validator_test.go delete mode 100644 model/auth.go delete mode 100644 model/auth_test.go delete mode 100644 model/auth_validator_test.go delete mode 100644 model/callback_state.go delete mode 100644 model/callback_state_validator_test.go delete mode 100644 model/delay_state.go delete mode 100644 model/delay_state_test.go delete mode 100644 model/delay_state_validator_test.go delete mode 100644 model/event.go delete mode 100644 model/event_data_filter.go delete mode 100644 model/event_data_filter_test.go delete mode 100644 model/event_state.go delete mode 100644 model/event_state_test.go delete mode 100644 model/event_state_validator.go delete mode 100644 model/event_state_validator_test.go delete mode 100644 model/event_test.go delete mode 100644 model/event_validator.go delete mode 100644 model/event_validator_test.go delete mode 100644 model/foreach_state.go delete mode 100644 model/foreach_state_test.go delete mode 100644 model/foreach_state_validator.go delete mode 100644 model/foreach_state_validator_test.go delete mode 100644 model/function.go delete mode 100644 model/function_validator_test.go delete mode 100644 model/inject_state.go delete mode 100644 model/operation_state.go delete mode 100644 model/operation_state_test.go delete mode 100644 model/operation_state_validator_test.go delete mode 100644 model/parallel_state.go delete mode 100644 model/parallel_state_test.go delete mode 100644 model/parallel_state_validator.go delete mode 100644 model/parallel_state_validator_test.go delete mode 100644 model/retry.go delete mode 100644 model/retry_test.go delete mode 100644 model/retry_validator.go delete mode 100644 model/retry_validator_test.go delete mode 100644 model/sleep_state.go delete mode 100644 model/sleep_state_test.go delete mode 100644 model/sleep_state_validator_test.go delete mode 100644 model/state_exec_timeout.go delete mode 100644 model/state_exec_timeout_test.go delete mode 100644 model/state_exec_timeout_validator_test.go delete mode 100644 model/states.go delete mode 100644 
model/states_validator.go delete mode 100644 model/states_validator_test.go delete mode 100644 model/switch_state.go delete mode 100644 model/switch_state_test.go delete mode 100644 model/switch_state_validator.go delete mode 100644 model/switch_state_validator_test.go delete mode 100644 model/workflow.go delete mode 100644 model/workflow_ref.go delete mode 100644 model/workflow_ref_test.go delete mode 100644 model/workflow_ref_validator_test.go delete mode 100644 model/workflow_test.go delete mode 100644 model/workflow_validator.go delete mode 100644 model/workflow_validator_test.go delete mode 100644 model/zz_generated.buildergen.go delete mode 100644 model/zz_generated.deepcopy.go rename {model => object}/object.go (87%) rename {model => object}/object_test.go (97%) delete mode 100644 parser/parser_test.go delete mode 100644 parser/testdata/applicationrequestfunctions.json delete mode 100644 parser/testdata/applicationrequestretries.json delete mode 100644 parser/testdata/constantsDogs.json delete mode 100644 parser/testdata/datainputschema.json delete mode 100644 parser/testdata/errors.json delete mode 100644 parser/testdata/eventbasedgreetingevents.json delete mode 100644 parser/testdata/eventdefs.yml delete mode 100644 parser/testdata/functiondefs.json delete mode 100644 parser/testdata/secrets.json delete mode 100644 parser/testdata/timeouts.json delete mode 100644 parser/testdata/workflows/VetAppointmentWorkflow.json delete mode 100644 parser/testdata/workflows/actiondata-defaultvalue.yaml delete mode 100644 parser/testdata/workflows/applicationrequest-issue103.json delete mode 100644 parser/testdata/workflows/applicationrequest-issue16.sw.yaml delete mode 100644 parser/testdata/workflows/applicationrequest-issue69.json delete mode 100644 parser/testdata/workflows/applicationrequest.json delete mode 100644 parser/testdata/workflows/applicationrequest.multiauth.json delete mode 100644 parser/testdata/workflows/applicationrequest.rp.json delete mode 100644 parser/testdata/workflows/applicationrequest.url.json delete mode 100644 parser/testdata/workflows/checkInbox.json delete mode 100644 parser/testdata/workflows/checkcarvitals.json delete mode 100644 parser/testdata/workflows/checkinbox.cron-test.sw.yaml delete mode 100644 parser/testdata/workflows/checkinbox.sw.yaml delete mode 100644 parser/testdata/workflows/compensate.sw.json delete mode 100644 parser/testdata/workflows/conditionbasedstate.yaml delete mode 100644 parser/testdata/workflows/continue-as-example.yaml delete mode 100644 parser/testdata/workflows/customerbankingtransactions.json delete mode 100644 parser/testdata/workflows/customercreditcheck.json delete mode 100644 parser/testdata/workflows/dataInputSchemaObject.json delete mode 100644 parser/testdata/workflows/eventbaseddataandswitch.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreeting.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreeting.sw.p.json delete mode 100644 parser/testdata/workflows/eventbasedgreetingexclusive.sw.json delete mode 100644 parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json delete mode 100644 parser/testdata/workflows/eventbasedswitch.sw.json delete mode 100644 parser/testdata/workflows/eventbasedswitchstate.json delete mode 100644 parser/testdata/workflows/fillglassofwater.json delete mode 100644 parser/testdata/workflows/finalizeCollegeApplication.json delete mode 100644 parser/testdata/workflows/greetings-constants-file.sw.yaml delete mode 100644 
parser/testdata/workflows/greetings-secret-file.sw.yaml delete mode 100644 parser/testdata/workflows/greetings-secret.sw.yaml delete mode 100644 parser/testdata/workflows/greetings-v08-spec.sw.yaml delete mode 100644 parser/testdata/workflows/greetings.sw.json delete mode 100644 parser/testdata/workflows/greetings.sw.yaml delete mode 100644 parser/testdata/workflows/greetings_sleep.sw.json delete mode 100644 parser/testdata/workflows/handleCarAuctionBid.json delete mode 100644 parser/testdata/workflows/helloworld.json delete mode 100644 parser/testdata/workflows/jobmonitoring.json delete mode 100644 parser/testdata/workflows/onboardcustomer.json delete mode 100644 parser/testdata/workflows/parallelexec.json delete mode 100644 parser/testdata/workflows/patientVitalsWorkflow.json delete mode 100644 parser/testdata/workflows/patientonboarding.sw.yaml delete mode 100644 parser/testdata/workflows/paymentconfirmation.json delete mode 100644 parser/testdata/workflows/provisionorders.sw.json delete mode 100644 parser/testdata/workflows/purchaseorderworkflow.sw.json delete mode 100644 parser/testdata/workflows/roomreadings.timeouts.file.sw.json delete mode 100644 parser/testdata/workflows/roomreadings.timeouts.sw.json delete mode 100644 parser/testdata/workflows/sendcloudeventonprovision.json delete mode 100644 parser/testdata/workflows/sendcustomeremail.json delete mode 100644 parser/testdata/workflows/solvemathproblems.json delete mode 100644 parser/testdata/workflows/urifiles/auth.json delete mode 100644 parser/testdata/workflows/vitalscheck.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue44.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest-issue74.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json delete mode 100644 parser/testdata/workflows/witherrors/applicationrequest.authdupl.json delete mode 100644 util/floatstr/floatstr.go delete mode 100644 util/floatstr/floatstr_test.go delete mode 100644 util/unmarshal.go delete mode 100644 util/unmarshal_test.go create mode 100644 validate/validator.go rename model/event_data_filter_validator_test.go => validate/validator_test.go (53%) delete mode 100644 validator/validator.go delete mode 100644 validator/validator_test.go delete mode 100644 validator/workflow.go diff --git a/Makefile b/Makefile index 06fde64..288693a 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,8 @@ lint: .PHONY: test coverage="false" -test: deepcopy buildergen +#test: deepcopy buildergen +test: make lint @go test ./... diff --git a/builder/builder.go b/builder/builder.go index 97ef3b2..423f6bc 100644 --- a/builder/builder.go +++ b/builder/builder.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,45 +17,37 @@ package builder import ( "encoding/json" + "github.com/serverlessworkflow/sdk-go/v4/validate" "sigs.k8s.io/yaml" - - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" ) -func New() *model.WorkflowBuilder { - return model.NewWorkflowBuilder() -} - -func Yaml(builder *model.WorkflowBuilder) ([]byte, error) { +func Validate(builder *WorkflowBuilder) error { data, err := Json(builder) if err != nil { - return nil, err + return err } - return yaml.JSONToYAML(data) -} -func Json(builder *model.WorkflowBuilder) ([]byte, error) { - workflow, err := Object(builder) + err = validate.FromJSONSource(data) if err != nil { - return nil, err + return err } - return json.Marshal(workflow) + + return nil } -func Object(builder *model.WorkflowBuilder) (*model.Workflow, error) { - workflow := builder.Build() - ctx := model.NewValidatorContext(&workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { +func Json(builder *WorkflowBuilder) ([]byte, error) { + data, err := json.MarshalIndent(builder.Node(), "", " ") + if err != nil { return nil, err } - return &workflow, nil + + return data, nil } -func Validate(object interface{}) error { - ctx := model.NewValidatorContext(object) - if err := val.GetValidator().StructCtx(ctx, object); err != nil { - return val.WorkflowError(err) +func Yaml(builder *WorkflowBuilder) ([]byte, error) { + data, err := Json(builder) + if err != nil { + return nil, err } - return nil + return yaml.JSONToYAML(data) } diff --git a/builder/builder_test.go b/builder/builder_test.go deleted file mode 100644 index 97b8c84..0000000 --- a/builder/builder_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builder - -import ( - "testing" - - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" - - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func prepareBuilder() *model.WorkflowBuilder { - builder := New().Key("key test").ID("id test") - - builder.AddFunctions().Name("function name").Operation("http://test") - builder.AddFunctions().Name("function name2").Operation("http://test") - - function3 := builder.AddFunctions().Name("function name2").Operation("http://test") - builder.RemoveFunctions(function3) - - state1 := builder.AddStates(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) - - inject := state1.InjectState() - inject.Data(map[string]model.Object{ - "test": model.FromMap(map[string]any{}), - }) - - return builder -} - -func TestValidate(t *testing.T) { - state1 := model.NewStateBuilder(). - Name("state"). - Type(model.StateTypeInject) - state1.End().Terminate(true) - err := Validate(state1) - assert.NoError(t, err) - - state2 := model.NewStateBuilder(). 
- Type(model.StateTypeInject) - state2.End().Terminate(true) - err = Validate(state2.Build()) - if assert.Error(t, err) { - var workflowErrors val.WorkflowErrors - if errors.As(err, &workflowErrors) { - assert.Equal(t, "state.name is required", workflowErrors[0].Error()) - } else { - // Handle other error types if necessary - t.Errorf("Unexpected error: %v", err) - } - } -} - -func TestObject(t *testing.T) { - workflow, err := Object(prepareBuilder()) - if assert.NoError(t, err) { - assert.Equal(t, "key test", workflow.Key) - assert.Equal(t, "id test", workflow.ID) - assert.Equal(t, "0.8", workflow.SpecVersion) - assert.Equal(t, "jq", workflow.ExpressionLang.String()) - assert.Equal(t, 2, len(workflow.Functions)) - - assert.Equal(t, "function name", workflow.Functions[0].Name) - assert.Equal(t, "function name2", workflow.Functions[1].Name) - } -} - -func TestJson(t *testing.T) { - data, err := Json(prepareBuilder()) - if assert.NoError(t, err) { - d := `{"id":"id test","key":"key test","version":"","specVersion":"0.8","expressionLang":"jq","states":[{"name":"state","type":"inject","end":{"terminate":true},"data":{"test":{}}}],"functions":[{"name":"function name","operation":"http://test","type":"rest"},{"name":"function name2","operation":"http://test","type":"rest"}]}` - assert.Equal(t, d, string(data)) - } -} - -func TestYaml(t *testing.T) { - data, err := Yaml(prepareBuilder()) - if assert.NoError(t, err) { - d := `expressionLang: jq -functions: -- name: function name - operation: http://test - type: rest -- name: function name2 - operation: http://test - type: rest -id: id test -key: key test -specVersion: "0.8" -states: -- data: - test: {} - end: - terminate: true - name: state - type: inject -version: "" -` - - assert.Equal(t, d, string(data)) - } -} diff --git a/builder/call.go b/builder/call.go new file mode 100644 index 0000000..0ea28ab --- /dev/null +++ b/builder/call.go @@ -0,0 +1,51 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package builder + +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +type CallKind string + +const ( + CallKindHttp CallKind = "http" + CallKindGrpc CallKind = "grpc" +) + +type CallBuilder struct { + root *graph.Node + with *MapBuilder +} + +func (b *CallBuilder) SetCall(call CallKind) *CallBuilder { + b.root.Edge("call").SetString(string(call)) + return b +} + +func (b *CallBuilder) GetCall() string { + return b.root.Edge("call").GetString() +} + +func (b *CallBuilder) With() *MapBuilder { + if b.with == nil { + b.with = NewMapBuilder(b.root.Edge("with")) + } + return b.with +} + +func NewCallBuilder(root *graph.Node) *CallBuilder { + return &CallBuilder{ + root: root, + } +} diff --git a/builder/do.go b/builder/do.go new file mode 100644 index 0000000..b4b20a9 --- /dev/null +++ b/builder/do.go @@ -0,0 +1,59 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "fmt" + + "github.com/serverlessworkflow/sdk-go/v4/graph" +) + +type DoBuilder struct { + root *graph.Node + tasks []any +} + +func (b *DoBuilder) AddCall(name string) (*CallBuilder, int) { + index := len(b.tasks) + nodeIndex := b.root.Edge(fmt.Sprintf("%d", index)) + nodeName := nodeIndex.Edge(name) + + callBuilder := NewCallBuilder(nodeName) + b.tasks = append(b.tasks, callBuilder) + return callBuilder, index +} + +func (b *DoBuilder) AddWait(name string) (*WaitBuilder, int) { + index := len(b.tasks) + nodeIndex := b.root.Edge(fmt.Sprintf("%d", index)) + nodeName := nodeIndex.Edge(name) + + waitBuilder := NewWaitBuilder(nodeName) + b.tasks = append(b.tasks, waitBuilder) + return waitBuilder, index +} + +func (b *DoBuilder) RemoveTask(index int) *DoBuilder { + b.tasks = append(b.tasks[:index], b.tasks[index+1:]...) + return b +} + +func NewDoBuilder(root *graph.Node) *DoBuilder { + root.List(true) + return &DoBuilder{ + root: root, + tasks: []any{}, + } +} diff --git a/builder/document.go b/builder/document.go new file mode 100644 index 0000000..abb76eb --- /dev/null +++ b/builder/document.go @@ -0,0 +1,73 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package builder + +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +type DocumentBuilder struct { + root *graph.Node +} + +func (b *DocumentBuilder) SetDSL(dsl string) *DocumentBuilder { + node := b.root.Edge("dsl") + node.SetString(dsl) + return b +} + +func (b *DocumentBuilder) GetDSL() string { + node := b.root.Edge("dsl") + return node.GetString() +} + +func (b *DocumentBuilder) SetNamespace(dsl string) *DocumentBuilder { + node := b.root.Edge("namespace") + node.SetString(dsl) + return b +} + +func (b *DocumentBuilder) GetNamespace() string { + node := b.root.Edge("namespace") + return node.GetString() +} + +func (b *DocumentBuilder) SetName(dsl string) *DocumentBuilder { + node := b.root.Edge("name") + node.SetString(dsl) + return b +} + +func (b *DocumentBuilder) GetName() string { + node := b.root.Edge("name") + return node.GetString() +} + +func (b *DocumentBuilder) SetVersion(dsl string) *DocumentBuilder { + node := b.root.Edge("version") + node.SetString(dsl) + return b +} + +func (b *DocumentBuilder) GetVersion() string { + node := b.root.Edge("version") + return node.GetString() +} + +func NewDocumentBuilder(root *graph.Node) *DocumentBuilder { + documentBuilder := &DocumentBuilder{ + root: root, + } + documentBuilder.SetDSL("1.0.0-alpha1") + return documentBuilder +} diff --git a/model/common.go b/builder/duration.go similarity index 53% rename from model/common.go rename to builder/duration.go index 6a9be3b..2cffdaf 100644 --- a/model/common.go +++ b/builder/duration.go @@ -1,4 +1,4 @@ -// Copyright 2021 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,14 +12,25 @@ // See the License for the specific language governing permissions and // limitations under the License. -package model +package builder -// Common schema for Serverless Workflow specification -type Common struct { - // Metadata information - // +optional - Metadata Metadata `json:"metadata,omitempty"` +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +type DurationBuilder struct { + root *graph.Node +} + +func (b *DurationBuilder) SetSeconds(seconds int) *DurationBuilder { + b.root.Edge("seconds").SetInt(seconds) + return b } -// Metadata information -type Metadata map[string]Object +func (b *DurationBuilder) GetSeconds() int { + return b.root.Edge("seconds").GetInt() +} + +func NewDurationBuilder(root *graph.Node) *DurationBuilder { + return &DurationBuilder{ + root: root, + } +} diff --git a/model/inject_state_validator_test.go b/builder/map.go similarity index 55% rename from model/inject_state_validator_test.go rename to builder/map.go index a8f127c..c7dd6be 100644 --- a/model/inject_state_validator_test.go +++ b/builder/map.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,17 +12,24 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package model +package builder -import "testing" +import "github.com/serverlessworkflow/sdk-go/v4/graph" -func TestInjectStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) +type MapBuilder struct { + root *graph.Node } -func TestInjectStateTimeoutStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} +func (b *MapBuilder) Set(name string, value string) { + b.root.Edge(name).SetString(value) +} + +func (b *MapBuilder) Get(name string) string { + return b.root.Edge(name).GetString() +} - StructLevelValidationCtx(t, testCases) +func NewMapBuilder(root *graph.Node) *MapBuilder { + return &MapBuilder{ + root: root, + } } diff --git a/model/action_data_filter_validator_test.go b/builder/use.go similarity index 67% rename from model/action_data_filter_validator_test.go rename to builder/use.go index df52da0..7bdc9b2 100644 --- a/model/action_data_filter_validator_test.go +++ b/builder/use.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,11 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. -package model +package builder -import "testing" +import "github.com/serverlessworkflow/sdk-go/v4/graph" -func TestActionDataFilterStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) +type UseBuilder struct { + root *graph.Node +} + +func NewUseBuilder(root *graph.Node) *UseBuilder { + return &UseBuilder{ + root: root, + } } diff --git a/builder/wait.go b/builder/wait.go new file mode 100644 index 0000000..d49ad23 --- /dev/null +++ b/builder/wait.go @@ -0,0 +1,44 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package builder + +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +type WaitBuilder struct { + root *graph.Node + duration *DurationBuilder +} + +func (b *WaitBuilder) SetWait(wait string) { + b.root.Edge("wait").Clear().SetString(string(wait)) +} + +func (b *WaitBuilder) GetWait() string { + return b.root.Edge("wait").GetString() +} + +func (b *WaitBuilder) Duration() *DurationBuilder { + if b.duration == nil { + node := b.root.Edge("wait").Clear() + b.duration = NewDurationBuilder(node) + } + return b.duration +} + +func NewWaitBuilder(root *graph.Node) *WaitBuilder { + return &WaitBuilder{ + root: root, + } +} diff --git a/builder/workflow.go b/builder/workflow.go new file mode 100644 index 0000000..76e53f9 --- /dev/null +++ b/builder/workflow.go @@ -0,0 +1,92 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "github.com/serverlessworkflow/sdk-go/v4/graph" + "github.com/serverlessworkflow/sdk-go/v4/internal/load" +) + +type WorkflowBuilder struct { + root *graph.Node + document *DocumentBuilder + do *DoBuilder + use *UseBuilder +} + +func (b *WorkflowBuilder) Document() *DocumentBuilder { + if b.document == nil { + b.document = NewDocumentBuilder(b.root.Edge("document")) + } + return b.document +} + +func (b *WorkflowBuilder) Do() *DoBuilder { + if b.do == nil { + b.do = NewDoBuilder(b.root.Edge("do")) + } + return b.do +} + +func (b *WorkflowBuilder) Use() *UseBuilder { + if b.use == nil { + b.use = NewUseBuilder(b.root.Edge("use")) + } + return b.use +} + +func (b *WorkflowBuilder) Node() *graph.Node { + return b.root +} + +func NewWorkflowBuilder() *WorkflowBuilder { + root := graph.NewNode() + return &WorkflowBuilder{ + root: root, + } +} + +func NewWorkflowBuilderFromFile(path string) (*WorkflowBuilder, error) { + root, _, err := load.FromFile(path) + if err != nil { + return nil, err + } + + return &WorkflowBuilder{ + root: root, + }, nil +} + +func NewWorkflowBuilderFromYAMLSource(source []byte) (*WorkflowBuilder, error) { + root, _, err := load.FromYAMLSource(source) + if err != nil { + return nil, err + } + + return &WorkflowBuilder{ + root: root, + }, nil +} + +func NewWorkflowBuilderFromJSONSource(source []byte) (*WorkflowBuilder, error) { + root, _, err := load.FromJSONSource(source) + if err != nil { + return nil, err + } + + return &WorkflowBuilder{ + root: root, + }, nil +} diff --git a/example/example.go b/example/example.go new file mode 100644 index 0000000..82a2e66 --- /dev/null +++ b/example/example.go @@ -0,0 +1,99 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "log" + + "github.com/serverlessworkflow/sdk-go/v4/builder" + "github.com/serverlessworkflow/sdk-go/v4/validate" +) + +func main() { + build() + buildFromSource() + validExample() +} + +func build() { + fmt.Println("builder") + + workflowBuilder := builder.NewWorkflowBuilder() + documentBuilder := workflowBuilder.Document() + documentBuilder.SetName("test") + documentBuilder.SetNamespace("test") + documentBuilder.SetVersion("1.0.0") + + doBuilder := workflowBuilder.Do() + callBuilder, _ := doBuilder.AddCall("test") + callBuilder.SetCall("http") + withBuilder := callBuilder.With() + withBuilder.Set("method", "get") + withBuilder.Set("endpoint", "https://petstore.swagger.io/v2/pet/{petId}") + + err := builder.Validate(workflowBuilder) + if err != nil { + fmt.Println(err) + } + + fmt.Println("json") + data, _ := builder.Json(workflowBuilder) + fmt.Println(string(data)) + fmt.Println("") + + fmt.Println("yaml") + data, _ = builder.Yaml(workflowBuilder) + fmt.Println(string(data)) +} + +func buildFromSource() { + fmt.Println("build from source") + + workflowBuilder, err := builder.NewWorkflowBuilderFromFile("./example/example1.yaml") + if err != nil { + log.Fatal(err) + } else { + fmt.Println("document.name:", workflowBuilder.Document().GetName()) + } + + err = builder.Validate(workflowBuilder) + if err != nil { + log.Fatal(err) + } else { + fmt.Println("success") + } +} + +func validExample() { + fmt.Println("valid") + + fmt.Println("./example/example1.yaml") + err := validate.FromFile("./example/example1.yaml") + if err != nil { + log.Fatal(err) + } else { + fmt.Println("success") + } + + fmt.Println("") + fmt.Println("./example/example2.yaml") + err = validate.FromFile("./example/example2.yaml") + if err != nil { + log.Fatal(err) + } else { + fmt.Println("success") + } +} diff --git a/parser/testdata/workflows/urifiles/auth.yaml b/example/example1.yaml similarity index 64% rename from parser/testdata/workflows/urifiles/auth.yaml rename to example/example1.yaml index 14ba4e2..9619e5e 100644 --- a/parser/testdata/workflows/urifiles/auth.yaml +++ b/example/example1.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 The Serverless Workflow Specification Authors +# Copyright 2024 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-- name: testAuth - properties: - token: test_token - scheme: bearer -- name: testAuth2 - properties: - password: test_pwd - username: test_user - scheme: basic +document: + dsl: 1.0.0-alpha1 + namespace: examples + name: call-http-shorthand-endpoint + version: 1.0.0-alpha1 +do: + - test: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaValidation.yaml b/example/example2.yaml similarity index 56% rename from parser/testdata/workflows/dataInputSchemaValidation.yaml rename to example/example2.yaml index 4bc1e11..eca7a19 100644 --- a/parser/testdata/workflows/dataInputSchemaValidation.yaml +++ b/example/example2.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 The Serverless Workflow Specification Authors +# Copyright 2024 The Serverless Workflow Specification Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -id: Valid DataInputSchema -version: '1.0' -specVersion: '0.8' -start: Start -dataInputSchema: - failOnValidationErrors: false - schema: "file://testdata/datainputschema.json" -states: -- name: Start - type: inject - data: - done: true - end: - terminate: true \ No newline at end of file +document: + dsl: '1.0.0-alpha1' + namespace: test + name: grpc-example + version: '0.1.0' +do: + - greet: + call: grpc + with: + proto: file://app/greet.proto + service: + name: GreeterApi.Greeter + host: localhost + port: 5011 + method: SayHello + arguments: + name: ${ .user.preferredDisplayName } \ No newline at end of file diff --git a/go.mod b/go.mod index 62aae70..3e30314 100644 --- a/go.mod +++ b/go.mod @@ -1,34 +1,32 @@ -module github.com/serverlessworkflow/sdk-go/v2 +module github.com/serverlessworkflow/sdk-go/v4 -go 1.19 +go 1.21 + +toolchain go1.23.0 require ( - github.com/go-playground/validator/v10 v10.11.1 github.com/pkg/errors v0.9.1 - github.com/relvacode/iso8601 v1.3.0 - github.com/sosodev/duration v1.2.0 + github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 github.com/stretchr/testify v1.8.0 + github.com/xeipuuv/gojsonschema v1.2.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/apimachinery v0.26.2 sigs.k8s.io/controller-runtime v0.14.4 - sigs.k8s.io/yaml v1.3.0 + sigs.k8s.io/yaml v1.4.0 ) require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-logr/logr v1.2.3 // indirect - github.com/go-playground/locales v0.14.0 // indirect - github.com/go-playground/universal-translator v0.18.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/leodido/go-urn v1.2.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/crypto v0.15.0 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect golang.org/x/net v0.18.0 // indirect - golang.org/x/sys v0.14.0 // indirect golang.org/x/text v0.14.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/go.sum b/go.sum index fa248b6..a0e2b00 100644 --- a/go.sum +++ b/go.sum @@ -1,21 +1,15 @@ -github.com/creack/pty v1.1.9/go.mod 
h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= -github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -23,44 +17,39 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/onsi/ginkgo/v2 v2.6.0 h1:9t9b9vRUbFq3C4qKFCGkVuq/fIHji802N1nrtkh1mNc= +github.com/onsi/ginkgo/v2 v2.6.0/go.mod h1:63DOGlLAH8+REH8jUGdL3YpCpu7JODesutUjdENfUAc= github.com/onsi/gomega v1.24.1 h1:KORJXNNTzJXzu4ScJWssJfJMnJ+2QJqhoQSRwNlze9E= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/onsi/gomega v1.24.1/go.mod h1:3AOiACssS3/MajrniINInwbfOOtfZvplPzuRSmvt1jM= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/relvacode/iso8601 v1.3.0 h1:HguUjsGpIMh/zsTczGN3DVJFxTU/GX+MMmzcKoMO7ko= -github.com/relvacode/iso8601 v1.3.0/go.mod h1:FlNp+jz+TXpyRqgmM7tnzHHzBnz776kmAH2h3sZCn0I= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= -github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/sosodev/duration v1.2.0 h1:pqK/FLSjsAADWY74SyWDCjOcd5l7H8GSnnOGEB9A1Us= -github.com/sosodev/duration v1.2.0/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 
h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= @@ -68,9 +57,6 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= -golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -79,7 +65,6 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= @@ -91,9 +76,7 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= @@ -111,20 +94,18 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= k8s.io/api v0.26.1 h1:f+SWYiPd/GsiWwVRz+NbFyCgvv75Pk9NK6dlkZgpCRQ= +k8s.io/api v0.26.1/go.mod h1:xd/GBNgR0f707+ATNyPmQ1oyKSgndzXij81FzWGsejg= k8s.io/apimachinery v0.26.2 h1:da1u3D5wfR5u2RpLhE/ZtZS2P7QvDgLZTi9wrNZl/tQ= k8s.io/apimachinery v0.26.2/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 h1:hWRbsoRWt44OEBnYUd4ceLy4ofBoh+p9vauWp/I5Gdg= @@ -137,5 +118,5 @@ sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 h1:iXTIw73aPyC+oRdyqqvVJuloN sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= -sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= -sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/graph/graph.go b/graph/graph.go new file mode 100644 index 0000000..fe6e3d0 --- /dev/null +++ b/graph/graph.go @@ -0,0 +1,226 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package graph + +import ( + "bytes" + "encoding/json" + "log" + "strings" +) + +type Lookup struct { + nodes []*Node +} + +func (l Lookup) Empty() bool { + return len(l.nodes) == 0 +} + +func (l Lookup) First() *Node { + return l.nodes[0] +} + +func (l Lookup) Get(index int) *Node { + return l.nodes[index] +} + +func (l Lookup) List() []*Node { + return l.nodes +} + +type Node struct { + value interface{} + order []string + parent *Node + edges map[string]*Node + list bool +} + +func (n *Node) List(list bool) { + n.list = list +} + +func (n *Node) IsList() bool { + return n.list +} + +func (n *Node) UnmarshalJSON(data []byte) error { + return unmarshalNode(n, data) +} + +func (n *Node) MarshalJSON() ([]byte, error) { + return marshalNode(n) +} + +func (n *Node) Edge(name string) *Node { + if n.HasValue() { + log.Fatal("value already defined, execute clear first") + } + if _, ok := n.edges[name]; !ok { + newNode := NewNode() + newNode.parent = n + n.edges[name] = newNode + n.order = append(n.order, name) + } + return n.edges[name] +} + +func (n *Node) SetString(value string) *Node { + n.setValue(value) + return n +} + +func (n *Node) SetInt(value int) *Node { + n.setValue(value) + return n +} + +func (n *Node) SetFloat(value float32) *Node { + n.setValue(value) + return n +} + +func (n *Node) SetBool(value bool) *Node { + n.setValue(value) + return n +} + +func (n *Node) setValue(value any) { + if len(n.edges) > 0 { + log.Fatal("already defined edges, execute clear first") + } + n.value = value +} + +func (n *Node) GetString() string { + return n.value.(string) +} + +func (n *Node) GetInt() int { + return n.value.(int) +} + +func (n *Node) GetFloat() float32 { + return n.value.(float32) +} + +func (n *Node) HasValue() bool { + return n.value != nil +} + +func (n *Node) Clear() *Node { + n.value = nil + n.edges = map[string]*Node{} + n.order = []string{} + return n +} + +func (n *Node) Parent() *Node { + return n.parent +} + +func (n *Node) Index(i int) (string, *Node) { + lookup := n.Lookup(n.order[i]) + if !lookup.Empty() { + return n.order[i], lookup.First() + } + return "", nil +} + +func (n *Node) Lookup(path string) Lookup { + dotIndex := strings.Index(path, ".") + var key string + if dotIndex == -1 { + key = strings.TrimSpace(path) + } else { + key = strings.TrimSpace(path[0:dotIndex]) + path = path[dotIndex+1:] + } + + var currentNode *Node + if key == "*" { + nodes := []*Node{} + if dotIndex == -1 { + for _, node := range n.edges { + nodes = append(nodes, node) + } + return Lookup{nodes} + } + for _, node := range n.edges { + if nodesLookup := node.Lookup(path); !nodesLookup.Empty() { + nodes = append(nodes, nodesLookup.List()...)
+ } + } + return Lookup{nodes} + + } + + equalIndex := strings.Index(key, "=") + if equalIndex != -1 { + value := key[equalIndex+1:] + key := key[:equalIndex] + + lookup := n.Lookup(key) + if !lookup.Empty() && lookup.First().value != value { + return Lookup{} + } + + return Lookup{[]*Node{n}} + } + + currentNode = n.edges[key] + if currentNode == nil { + return Lookup{} + } + if dotIndex == -1 { + return Lookup{[]*Node{currentNode}} + } + + return currentNode.Lookup(path) +} + +func NewNode() *Node { + return (&Node{}).Clear() +} + +func UnmarshalJSON(data []byte) (*Node, error) { + node := NewNode() + err := json.Unmarshal(data, &node) + if err != nil { + return nil, err + } + + return node, nil +} + +func MarshalJSON(n *Node) ([]byte, error) { + data, err := json.Marshal(n) + if err != nil { + return nil, err + } + + var out bytes.Buffer + err = json.Indent(&out, data, "", " ") + if err != nil { + return nil, err + } + + return out.Bytes(), nil +} + +func LoadExternalResource(n *Node) error { + return nil +} diff --git a/graph/graph_test.go b/graph/graph_test.go new file mode 100644 index 0000000..61f421b --- /dev/null +++ b/graph/graph_test.go @@ -0,0 +1,127 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
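A minimal usage sketch of the graph.Node API introduced above, assuming only what graph.go defines; the edge names and values below are invented for illustration, not part of the patch.

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/graph"
)

func main() {
	// Build a small tree equivalent to {"document": {"name": "example"}}.
	root := graph.NewNode()
	root.Edge("document").Edge("name").SetString("example")

	// MarshalJSON emits object edges in insertion order and indents the output.
	data, err := graph.MarshalJSON(root)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))
}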
+ +package graph + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGraph(t *testing.T) { + source := []byte(`{ + "test": "val", + "test2": "val2", + "list": [ + "test1" + ], + "listObject": [ + { + "test": "val" + }, + { + "test": "val1" + }, + { + "test2": "va2" + } + ], + "deep": [ + [ + { + "test": [ + { + "test2": "val", + "test3": "val1", + "test4": { + "test5": "val" + } + }, + { + "test2": "val" + } + ] + } + ] + ] +}`) + + root, err := UnmarshalJSON(source) + if !assert.NoError(t, err) { + return + } + + t.Run("marshal", func(t *testing.T) { + _, err := MarshalJSON(root) + assert.NoError(t, err) + }) + + t.Run("lookup key", func(t *testing.T) { + lookup := root.Lookup("test") + assert.NoError(t, err) + assert.Equal(t, "val", lookup.First().value) + }) + + t.Run("lookup not found", func(t *testing.T) { + lookup := root.Lookup("list2") + assert.Equal(t, 0, len(lookup.List())) + }) + + t.Run("lookup list", func(t *testing.T) { + lookup := root.Lookup("list") + assert.Nil(t, lookup.First().value) + assert.Equal(t, 1, len(lookup.First().edges)) + + lookup = root.Lookup("listObject.*") + assert.Nil(t, lookup.Get(0).value) + assert.Equal(t, 3, len(lookup.List())) + }) + + t.Run("lookup list index", func(t *testing.T) { + lookup := root.Lookup("list.0") + assert.Equal(t, "test1", lookup.First().value) + }) + + t.Run("lookup search in a list", func(t *testing.T) { + lookup := root.Lookup("listObject.*.test") + assert.Equal(t, 2, len(lookup.List())) + assert.Equal(t, "val", lookup.Get(0).value) + assert.Equal(t, "val1", lookup.Get(1).value) + }) + + t.Run("lookup deep", func(t *testing.T) { + lookup := root.Lookup("deep.*.*.test.*.test2") + assert.Equal(t, 2, len(lookup.List())) + assert.Equal(t, "val", lookup.Get(0).value) + assert.Equal(t, "val", lookup.Get(1).value) + + lookup = root.Lookup("deep.*.*.test.*.test2=val") + assert.Equal(t, 2, len(lookup.List())) + assert.Equal(t, 3, len(lookup.Get(0).edges)) + assert.Equal(t, 1, len(lookup.Get(1).edges)) + }) + + t.Run("lookup with value equality", func(t *testing.T) { + lookup := root.Lookup("deep.*.*.test.*.test2=val") + assert.Equal(t, 2, len(lookup.List())) + + assert.Equal(t, "val", lookup.Get(0).Lookup("test2").First().value) + assert.Equal(t, "val1", lookup.Get(0).Lookup("test3").First().GetString()) + + assert.Equal(t, false, lookup.Get(1).Lookup("test2").Empty()) + assert.Equal(t, true, lookup.Get(1).Lookup("test3").Empty()) + assert.Equal(t, "val", lookup.Get(1).Lookup("test2").First().GetString()) + }) +} diff --git a/graph/marshal.go b/graph/marshal.go new file mode 100644 index 0000000..11ae3bc --- /dev/null +++ b/graph/marshal.go @@ -0,0 +1,60 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
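The tests above exercise the lookup path syntax: segments are dot-separated, list elements are addressed by their stringified index ("list.0"), "*" fans out over every edge, and a trailing "key=value" segment keeps only the parent nodes whose "key" edge holds that value. A small sketch under those assumptions, with invented task data:

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/graph"
)

func main() {
	root, err := graph.UnmarshalJSON([]byte(`{
		"tasks": [
			{"name": "a", "kind": "call"},
			{"name": "b", "kind": "wait"}
		]
	}`))
	if err != nil {
		panic(err)
	}

	// All task names: fan out over the list, then take the "name" edge.
	for _, n := range root.Lookup("tasks.*.name").List() {
		fmt.Println(n.GetString())
	}

	// Only the task objects whose "kind" edge equals "call".
	calls := root.Lookup("tasks.*.kind=call")
	fmt.Println(len(calls.List())) // 1
}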
+ +package graph + +import ( + "encoding/json" +) + +func marshalNode(n *Node) ([]byte, error) { + if n.value != nil { + return json.Marshal(n.value) + } + + var out []byte + if n.list { + out = append(out, '[') + } else { + out = append(out, '{') + } + + nEdge := len(n.order) - 1 + for i, edge := range n.order { + node := n.edges[edge] + val, err := json.Marshal(node) + if err != nil { + return nil, err + } + + if n.list { + out = append(out, val...) + } else { + out = append(out, []byte("\""+edge+"\":")...) + out = append(out, val...) + } + + if nEdge != i { + out = append(out, byte(',')) + } + } + + if n.list { + out = append(out, ']') + } else { + out = append(out, '}') + } + + return out, nil +} diff --git a/graph/unmarshal.go b/graph/unmarshal.go new file mode 100644 index 0000000..d9fc23c --- /dev/null +++ b/graph/unmarshal.go @@ -0,0 +1,167 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package graph + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + "path/filepath" + "strings" + "time" + + "sigs.k8s.io/yaml" + + "github.com/serverlessworkflow/sdk-go/v4/internal/util" +) + +// TODO: Remove global variable +var HttpClient = http.Client{Timeout: time.Duration(1) * time.Second} + +func unmarshalNode(n *Node, data []byte) error { + data = bytes.TrimSpace(data) + if data[0] == '{' { + return unmarshalObject(n, data) + } else if data[0] == '[' { + return unmarshalList(n, data) + } + + return json.Unmarshal(data, &n.value) +} + +func unmarshalObject(n *Node, data []byte) error { + dataMap := map[string]json.RawMessage{} + err := json.Unmarshal(data, &dataMap) + if err != nil { + return err + } + + for key, val := range dataMap { + node := n.Edge(key) + err := json.Unmarshal(val, &node) + if err != nil { + return err + } + + } + + return nil +} + +func unmarshalList(n *Node, data []byte) error { + dataMap := []json.RawMessage{} + err := json.Unmarshal(data, &dataMap) + if err != nil { + return err + } + + n.List(true) + + for i, val := range dataMap { + key := fmt.Sprintf("%d", i) + node := n.Edge(key) + err := json.Unmarshal(val, &node) + if err != nil { + return err + } + } + + return nil +} + +func loadExternalResource(url string) (b []byte, err error) { + index := strings.Index(url, "://") + if index == -1 { + b, err = getBytesFromFile(url) + } else { + scheme := url[:index] + switch scheme { + case "http", "https": + b, err = getBytesFromHttp(url) + case "file": + b, err = getBytesFromFile(url[index+3:]) + default: + return nil, fmt.Errorf("unsupported scheme: %q", scheme) + } + } + if err != nil { + return + } + + // TODO: optimize this + // NOTE: In specification, we can declare independent definitions with another file format, so + // we must convert independently yaml source to json format data before unmarshal. 
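marshalNode writes scalar values directly, emits object edges in the order they were added (n.order), and serializes list nodes as JSON arrays while dropping their numeric keys. A short sketch assuming that behavior; the list contents are made up:

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/graph"
)

func main() {
	// A list node: numeric edge keys are kept internally but not emitted.
	items := graph.NewNode()
	items.List(true)
	items.Edge("0").SetString("first")
	items.Edge("1").SetString("second")

	out, err := graph.MarshalJSON(items)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // ["first", "second"], indented
}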
+ if !json.Valid(b) { + b, err = yaml.YAMLToJSON(b) + if err != nil { + return nil, err + } + return b, nil + } + + return b, nil +} + +func getBytesFromFile(path string) ([]byte, error) { + if util.WebAssembly() { + return nil, fmt.Errorf("unsupported open file") + } + + // if path is relative, search in include paths + if !filepath.IsAbs(path) { + paths := util.IncludePaths() + pathFound := false + for i := 0; i < len(paths) && !pathFound; i++ { + sn := filepath.Join(paths[i], path) + _, err := os.Stat(sn) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + return nil, err + } + } else { + path = sn + pathFound = true + } + } + if !pathFound { + return nil, fmt.Errorf("file not found: %q", path) + } + } + + return os.ReadFile(filepath.Clean(path)) +} + +func getBytesFromHttp(url string) ([]byte, error) { + req, err := http.NewRequest(http.MethodGet, url, nil) + if err != nil { + return nil, err + } + + resp, err := HttpClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + buf := new(bytes.Buffer) + if _, err = buf.ReadFrom(resp.Body); err != nil { + return nil, err + } + + return buf.Bytes(), nil +} diff --git a/hack/conv/main.go b/hack/conv/main.go index e70e738..eb54435 100644 --- a/hack/conv/main.go +++ b/hack/conv/main.go @@ -25,7 +25,7 @@ import ( "gopkg.in/yaml.v3" - "github.com/serverlessworkflow/sdk-go/v2/test" + "github.com/serverlessworkflow/sdk-go/v4/test" ) func convert(i interface{}) interface{} { diff --git a/internal/dsl/default.go b/internal/dsl/default.go new file mode 100644 index 0000000..44a727c --- /dev/null +++ b/internal/dsl/default.go @@ -0,0 +1,70 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
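loadExternalResource falls back to yaml.YAMLToJSON whenever the fetched bytes are not valid JSON. The same conversion can be done by hand before handing the bytes to the graph package; the inline YAML below is invented for illustration:

package main

import (
	"fmt"

	"sigs.k8s.io/yaml"

	"github.com/serverlessworkflow/sdk-go/v4/graph"
)

func main() {
	src := []byte("document:\n  name: example\n")

	// Mirror the fallback in loadExternalResource: convert YAML to JSON first.
	jsonBytes, err := yaml.YAMLToJSON(src)
	if err != nil {
		panic(err)
	}

	root, err := graph.UnmarshalJSON(jsonBytes)
	if err != nil {
		panic(err)
	}
	fmt.Println(root.Lookup("document.name").First().GetString()) // example
}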
+ +package dsl + +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +func ApplyDefault(node *graph.Node) error { + lookup := node.Lookup("do.*.*.call=http") + + for _, node := range lookup.List() { + lookupEdged := node.Lookup("with.content") + for _, nodeEdge := range lookupEdged.List() { + if !nodeEdge.HasValue() { + nodeEdge.SetString("content") + } + } + } + + lookup = node.Lookup("do.*.*.then") + for _, node := range lookup.List() { + if !node.HasValue() { + node.SetString("continue") + } + } + + lookup = node.Lookup("do.*.*.fork") + for _, node := range lookup.List() { + if !node.Edge("compete").HasValue() { + node.SetBool(false) + } + } + + lookup = node.Lookup("do.*.*.run.workflow") + for _, node := range lookup.List() { + if !node.Edge("version").HasValue() { + node.Edge("version").SetString("latest") + } + } + + lookup = node.Lookup("do.*.*.catch") + for _, node := range lookup.List() { + if !node.Edge("catch").HasValue() { + node.Edge("catch").SetString("error") + } + } + + lookup = node.Lookup("evaluate.language") + if !lookup.Empty() { + node.Edge("evaluate").Edge("language").SetString("jq") + } + + lookup = node.Lookup("evaluate.mode") + if !lookup.Empty() { + node.Edge("evaluate").Edge("mode").SetString("strict") + } + + return nil +} diff --git a/internal/dsl/dsl.go b/internal/dsl/dsl.go new file mode 100644 index 0000000..0f579dd --- /dev/null +++ b/internal/dsl/dsl.go @@ -0,0 +1,1041 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dsl + +const DSLSpec = ` +$id: https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.yaml +$schema: https://json-schema.org/draft/2020-12/schema +description: Serverless Workflow DSL - Workflow Schema +type: object +properties: + document: + type: object + properties: + dsl: + type: string + pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$ + description: The version of the DSL used by the workflow. + namespace: + type: string + pattern: ^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$ + description: The workflow's namespace. + name: + type: string + pattern: ^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$ + description: The workflow's name. + version: + type: string + pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$ + description: The workflow's semantic version. + title: + type: string + description: The workflow's title. + summary: + type: string + description: The workflow's Markdown summary. + tags: + type: object + description: A key/value mapping of the workflow's tags, if any. + additionalProperties: true + required: [ dsl, namespace, name, version ] + description: Documents the workflow + input: + $ref: '#/$defs/input' + description: Configures the workflow's input. 
+ use: + type: object + properties: + authentications: + type: object + additionalProperties: + $ref: '#/$defs/authenticationPolicy' + description: The workflow's reusable authentication policies. + errors: + type: object + additionalProperties: + $ref: '#/$defs/error' + description: The workflow's reusable errors. + extensions: + type: array + items: + type: object + title: ExtensionItem + minProperties: 1 + maxProperties: 1 + additionalProperties: + $ref: '#/$defs/extension' + description: The workflow's extensions. + functions: + type: object + additionalProperties: + $ref: '#/$defs/task' + description: The workflow's reusable functions. + retries: + type: object + additionalProperties: + $ref: '#/$defs/retryPolicy' + description: The workflow's reusable retry policies. + secrets: + type: array + items: + type: string + description: The workflow's secrets. + description: Defines the workflow's reusable components. + do: + description: Defines the task(s) the workflow must perform + $ref: '#/$defs/taskList' + timeout: + $ref: '#/$defs/timeout' + description: The workflow's timeout configuration, if any. + output: + $ref: '#/$defs/output' + description: Configures the workflow's output. + schedule: + type: object + properties: + every: + $ref: '#/$defs/duration' + description: Specifies the duration of the interval at which the workflow should be executed. + cron: + type: string + description: Specifies the schedule using a cron expression, e.g., '0 0 * * *' for daily at midnight." + after: + $ref: '#/$defs/duration' + description: Specifies a delay duration that the workflow must wait before starting again after it completes. + on: + $ref: '#/$defs/eventConsumptionStrategy' + description: Specifies the events that trigger the workflow execution. + description: Schedules the workflow +$defs: + taskList: + type: array + items: + type: object + title: TaskItem + minProperties: 1 + maxProperties: 1 + additionalProperties: + $ref: '#/$defs/task' + taskBase: + type: object + properties: + if: + type: string + description: A runtime expression, if any, used to determine whether or not the task should be run. + input: + $ref: '#/$defs/input' + description: Configure the task's input. + output: + $ref: '#/$defs/output' + description: Configure the task's output. + export: + $ref: '#/$defs/export' + description: Export task output to context. + timeout: + $ref: '#/$defs/timeout' + description: The task's timeout configuration, if any. + then: + $ref: '#/$defs/flowDirective' + description: The flow directive to be performed upon completion of the task. + task: + unevaluatedProperties: false + oneOf: + - $ref: '#/$defs/callTask' + - $ref: '#/$defs/doTask' + - $ref: '#/$defs/forkTask' + - $ref: '#/$defs/emitTask' + - $ref: '#/$defs/forTask' + - $ref: '#/$defs/listenTask' + - $ref: '#/$defs/raiseTask' + - $ref: '#/$defs/runTask' + - $ref: '#/$defs/setTask' + - $ref: '#/$defs/switchTask' + - $ref: '#/$defs/tryTask' + - $ref: '#/$defs/waitTask' + callTask: + oneOf: + - title: CallAsyncAPI + $ref: '#/$defs/taskBase' + type: object + required: [ call, with ] + unevaluatedProperties: false + properties: + call: + type: string + const: asyncapi + with: + title: WithAsyncAPI + type: object + properties: + document: + $ref: '#/$defs/externalResource' + description: The document that defines the AsyncAPI operation to call. + operationRef: + type: string + description: A reference to the AsyncAPI operation to call. 
+ server: + type: string + description: A a reference to the server to call the specified AsyncAPI operation on. If not set, default to the first server matching the operation's channel. + message: + type: string + description: The name of the message to use. If not set, defaults to the first message defined by the operation. + binding: + type: string + description: The name of the binding to use. If not set, defaults to the first binding defined by the operation. + payload: + type: object + description: The payload to call the AsyncAPI operation with, if any. + authentication: + $ref: '#/$defs/referenceableAuthenticationPolicy' + description: The authentication policy, if any, to use when calling the AsyncAPI operation. + required: [ document, operationRef ] + additionalProperties: false + description: Defines the AsyncAPI call to perform. + - title: CallGRPC + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ call, with ] + properties: + call: + type: string + const: grpc + with: + title: WithGRPC + type: object + properties: + proto: + $ref: '#/$defs/externalResource' + description: The proto resource that describes the GRPC service to call. + service: + type: object + properties: + name: + type: string + description: The name of the GRPC service to call. + host: + type: string + pattern: ^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$ + description: The hostname of the GRPC service to call. + port: + type: integer + min: 0 + max: 65535 + description: The port number of the GRPC service to call. + authentication: + $ref: '#/$defs/referenceableAuthenticationPolicy' + description: The endpoint's authentication policy, if any. + required: [ name, host ] + method: + type: string + description: The name of the method to call on the defined GRPC service. + arguments: + type: object + additionalProperties: true + description: The arguments, if any, to call the method with. + required: [ proto, service, method ] + additionalProperties: false + description: Defines the GRPC call to perform. + - title: CallHTTP + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ call, with ] + properties: + call: + type: string + const: http + with: + title: WithHTTP + type: object + properties: + method: + type: string + description: The HTTP method of the HTTP request to perform. + endpoint: + description: The HTTP endpoint to send the request to. + oneOf: + - $ref: '#/$defs/endpoint' + - type: string + format: uri-template + headers: + type: object + description: A name/value mapping of the headers, if any, of the HTTP request to perform. + body: + description: The body, if any, of the HTTP request to perform. + output: + type: string + enum: [ raw, content, response ] + description: The http call output format. Defaults to 'content'. + required: [ method, endpoint ] + additionalProperties: false + description: Defines the HTTP call to perform. + - title: CallOpenAPI + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ call, with ] + properties: + call: + type: string + const: openapi + with: + title: WithOpenAPI + type: object + properties: + document: + $ref: '#/$defs/externalResource' + description: The document that defines the OpenAPI operation to call. + operationId: + type: string + description: The id of the OpenAPI operation to call. + parameters: + type: object + additionalProperties: true + description: A name/value mapping of the parameters of the OpenAPI operation to call. 
+ authentication: + $ref: '#/$defs/referenceableAuthenticationPolicy' + description: The authentication policy, if any, to use when calling the OpenAPI operation. + output: + type: string + enum: [ raw, content, response ] + description: The http call output format. Defaults to 'content'. + required: [ document, operationId ] + additionalProperties: false + description: Defines the OpenAPI call to perform. + - title: CallFunction + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ call ] + properties: + call: + type: string + not: + enum: ["asyncapi", "grpc", "http", "openapi"] + description: The name of the function to call. + with: + type: object + additionalProperties: true + description: A name/value mapping of the parameters, if any, to call the function with. + forkTask: + description: Allows workflows to execute multiple tasks concurrently and optionally race them against each other, with a single possible winner, which sets the task's output. + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ fork ] + properties: + fork: + type: object + required: [ branches ] + properties: + branches: + $ref: '#/$defs/taskList' + compete: + description: Indicates whether or not the concurrent tasks are racing against each other, with a single possible winner, which sets the composite task's output. + type: boolean + default: false + doTask: + description: Allows to execute a list of tasks in sequence + $ref: '#/$defs/taskBase' + type: object + unevaluatedProperties: false + required: [ do ] + properties: + do: + $ref: '#/$defs/taskList' + emitTask: + description: Allows workflows to publish events to event brokers or messaging systems, facilitating communication and coordination between different components and services. + $ref: '#/$defs/taskBase' + type: object + required: [ emit ] + unevaluatedProperties: false + properties: + emit: + type: object + properties: + event: + type: object + properties: + id: + type: string + description: The event's unique identifier + source: + type: string + format: uri + description: Identifies the context in which an event happened + type: + type: string + description: This attribute contains a value describing the type of event related to the originating occurrence. + time: + type: string + format: date-time + subject: + type: string + datacontenttype: + type: string + description: Content type of data value. This attribute enables data to carry any type of content, whereby format and encoding might differ from that of the chosen event format. + dataschema: + type: string + format: uri + required: [ source, type ] + additionalProperties: true + required: [ event ] + forTask: + description: Allows workflows to iterate over a collection of items, executing a defined set of subtasks for each item in the collection. This task type is instrumental in handling scenarios such as batch processing, data transformation, and repetitive operations across datasets. + $ref: '#/$defs/taskBase' + type: object + required: [ for, do ] + unevaluatedProperties: false + properties: + for: + type: object + properties: + each: + type: string + description: The name of the variable used to store the current item being enumerated. + default: item + in: + type: string + description: A runtime expression used to get the collection to enumerate. + at: + type: string + description: The name of the variable used to store the index of the current item being enumerated. 
+ default: index + required: [ in ] + while: + type: string + description: A runtime expression that represents the condition, if any, that must be met for the iteration to continue. + do: + $ref: '#/$defs/taskList' + listenTask: + description: Provides a mechanism for workflows to await and react to external events, enabling event-driven behavior within workflow systems. + $ref: '#/$defs/taskBase' + type: object + required: [ listen ] + unevaluatedProperties: false + properties: + listen: + type: object + properties: + to: + $ref: '#/$defs/eventConsumptionStrategy' + description: Defines the event(s) to listen to. + required: [ to ] + raiseTask: + description: Intentionally triggers and propagates errors. + $ref: '#/$defs/taskBase' + type: object + required: [ raise ] + unevaluatedProperties: false + properties: + raise: + type: object + properties: + error: + $ref: '#/$defs/error' + description: Defines the error to raise. + required: [ error ] + runTask: + description: Provides the capability to execute external containers, shell commands, scripts, or workflows. + $ref: '#/$defs/taskBase' + type: object + required: [ run ] + unevaluatedProperties: false + properties: + run: + type: object + oneOf: + - title: RunContainer + properties: + container: + type: object + properties: + image: + type: string + description: The name of the container image to run. + command: + type: string + description: The command, if any, to execute on the container + ports: + type: object + description: The container's port mappings, if any. + volumes: + type: object + description: The container's volume mappings, if any. + environment: + title: ContainerEnvironment + type: object + description: A key/value mapping of the environment variables, if any, to use when running the configured process. + required: [ image ] + required: [ container ] + description: Enables the execution of external processes encapsulated within a containerized environment. + - title: RunScript + properties: + script: + type: object + properties: + language: + type: string + description: The language of the script to run. + environment: + title: ScriptEnvironment + type: object + additionalProperties: true + description: A key/value mapping of the environment variables, if any, to use when running the configured process. + oneOf: + - title: ScriptInline + properties: + code: + type: string + required: [ code ] + description: The script's code. + - title: ScriptExternal + properties: + source: + $ref: '#/$defs/externalResource' + description: The script's resource. + required: [ source ] + required: [ language ] + required: [ script ] + description: Enables the execution of custom scripts or code within a workflow, empowering workflows to perform specialized logic, data processing, or integration tasks by executing user-defined scripts written in various programming languages. + - title: RunShell + properties: + shell: + type: object + properties: + command: + type: string + description: The shell command to run. + arguments: + title: ShellArguments + type: object + additionalProperties: true + description: A list of the arguments of the shell command to run. + environment: + title: ShellEnvironment + type: object + additionalProperties: true + description: A key/value mapping of the environment variables, if any, to use when running the configured process. 
+ required: [ command ] + required: [ shell ] + description: Enables the execution of shell commands within a workflow, enabling workflows to interact with the underlying operating system and perform system-level operations, such as file manipulation, environment configuration, or system administration tasks. + - title: RunWokflow + properties: + workflow: + title: RunWorkflowDescriptor + type: object + properties: + namespace: + type: string + description: The namespace the workflow to run belongs to. + name: + type: string + description: The name of the workflow to run. + version: + type: string + default: latest + description: The version of the workflow to run. Defaults to latest + input: + title: WorkflowInput + type: object + additionalProperties: true + description: The data, if any, to pass as input to the workflow to execute. The value should be validated against the target workflow's input schema, if specified. + required: [ namespace, name, version ] + required: [ workflow ] + description: Enables the invocation and execution of nested workflows within a parent workflow, facilitating modularization, reusability, and abstraction of complex logic or business processes by encapsulating them into standalone workflow units. + setTask: + description: A task used to set data + $ref: '#/$defs/taskBase' + type: object + required: [ set ] + unevaluatedProperties: false + properties: + set: + type: object + minProperties: 1 + additionalProperties: true + description: The data to set + switchTask: + description: Enables conditional branching within workflows, allowing them to dynamically select different paths based on specified conditions or criteria + $ref: '#/$defs/taskBase' + type: object + required: [ switch ] + unevaluatedProperties: false + properties: + switch: + type: array + minItems: 1 + items: + type: object + minProperties: 1 + maxProperties: 1 + title: SwitchItem + additionalProperties: + type: object + title: SwitchCase + properties: + name: + type: string + description: The case's name. + when: + type: string + description: A runtime expression used to determine whether or not the case matches. + then: + $ref: '#/$defs/flowDirective' + description: The flow directive to execute when the case matches. + tryTask: + description: Serves as a mechanism within workflows to handle errors gracefully, potentially retrying failed tasks before proceeding with alternate ones. + $ref: '#/$defs/taskBase' + type: object + required: [ try, catch ] + unevaluatedProperties: false + properties: + try: + description: The task(s) to perform. + $ref: '#/$defs/taskList' + catch: + type: object + properties: + errors: + title: CatchErrors + type: object + as: + type: string + description: The name of the runtime expression variable to save the error as. Defaults to 'error'. + when: + type: string + description: A runtime expression used to determine whether or not to catch the filtered error + exceptWhen: + type: string + description: A runtime expression used to determine whether or not to catch the filtered error + retry: + $ref: '#/$defs/retryPolicy' + description: The retry policy to use, if any, when catching errors. + do: + description: The definition of the task(s) to run when catching an error. + $ref: '#/$defs/taskList' + waitTask: + description: Allows workflows to pause or delay their execution for a specified period of time. + $ref: '#/$defs/taskBase' + type: object + required: [ wait ] + unevaluatedProperties: false + properties: + wait: + description: The amount of time to wait. 
+ $ref: '#/$defs/duration' + flowDirective: + additionalProperties: false + anyOf: + - type: string + enum: [ continue, exit, end ] + default: continue + - type: string + referenceableAuthenticationPolicy: + type: object + oneOf: + - title: AuthenticationPolicyReference + properties: + use: + type: string + minLength: 1 + description: The name of the authentication policy to use + required: [use] + - $ref: '#/$defs/authenticationPolicy' + secretBasedAuthenticationPolicy: + type: object + properties: + use: + type: string + minLength: 1 + description: The name of the authentication policy to use + required: [use] + authenticationPolicy: + type: object + oneOf: + - title: BasicAuthenticationPolicy + properties: + basic: + type: object + oneOf: + - properties: + username: + type: string + description: The username to use. + password: + type: string + description: The password to use. + required: [ username, password ] + - $ref: '#/$defs/secretBasedAuthenticationPolicy' + required: [ basic ] + description: Use basic authentication. + - title: BearerAuthenticationPolicy + properties: + bearer: + type: object + oneOf: + - properties: + token: + type: string + description: The bearer token to use. + required: [ token ] + - $ref: '#/$defs/secretBasedAuthenticationPolicy' + required: [ bearer ] + description: Use bearer authentication. + - title: OAuth2AuthenticationPolicy + properties: + oauth2: + type: object + oneOf: + - properties: + authority: + type: string + format: uri + description: The URI that references the OAuth2 authority to use. + grant: + type: string + description: The grant type to use. + client: + type: object + properties: + id: + type: string + description: The client id to use. + secret: + type: string + description: The client secret to use, if any. + required: [ id ] + scopes: + type: array + items: + type: string + description: The scopes, if any, to request the token for. + audiences: + type: array + items: + type: string + description: The audiences, if any, to request the token for. + username: + type: string + description: The username to use. Used only if the grant type is Password. + password: + type: string + description: The password to use. Used only if the grant type is Password. + subject: + $ref: '#/$defs/oauth2Token' + description: The security token that represents the identity of the party on behalf of whom the request is being made. + actor: + $ref: '#/$defs/oauth2Token' + description: The security token that represents the identity of the acting party. + required: [ authority, grant, client ] + - $ref: '#/$defs/secretBasedAuthenticationPolicy' + required: [ oauth2 ] + description: Use OAUTH2 authentication. + description: Defines an authentication policy. + oauth2Token: + type: object + properties: + token: + type: string + description: The security token to use to use. + type: + type: string + description: The type of the security token to use to use. + required: [ token, type ] + duration: + type: object + minProperties: 1 + properties: + days: + type: integer + description: Number of days, if any. + hours: + type: integer + description: Number of days, if any. + minutes: + type: integer + description: Number of minutes, if any. + seconds: + type: integer + description: Number of seconds, if any. + milliseconds: + type: integer + description: Number of milliseconds, if any. + description: The definition of a duration. + error: + type: object + properties: + type: + type: string + format: uri + description: A URI reference that identifies the error type. 
+ status: + type: integer + description: The status code generated by the origin for this occurrence of the error. + instance: + type: string + format: json-pointer + description: A JSON Pointer used to reference the component the error originates from. + title: + type: string + description: A short, human-readable summary of the error. + detail: + type: string + description: A human-readable explanation specific to this occurrence of the error. + required: [ type, status, instance ] + endpoint: + type: object + properties: + uri: + type: string + format: uri-template + description: The endpoint's URI. + authentication: + $ref: '#/$defs/referenceableAuthenticationPolicy' + description: The authentication policy to use. + required: [ uri ] + eventConsumptionStrategy: + type: object + oneOf: + - title: AllEventConsumptionStrategy + properties: + all: + type: array + items: + $ref: '#/$defs/eventFilter' + description: A list containing all the events that must be consumed. + required: [ all ] + - title: AnyEventConsumptionStrategy + properties: + any: + type: array + items: + $ref: '#/$defs/eventFilter' + description: A list containing any of the events to consume. + required: [ any ] + - title: OneEventConsumptionStrategy + properties: + one: + $ref: '#/$defs/eventFilter' + description: The single event to consume. + required: [ one ] + eventFilter: + type: object + properties: + with: + title: WithEvent + type: object + minProperties: 1 + properties: + id: + type: string + description: The event's unique identifier + source: + type: string + description: Identifies the context in which an event happened + type: + type: string + description: This attribute contains a value describing the type of event related to the originating occurrence. + time: + type: string + subject: + type: string + datacontenttype: + type: string + description: Content type of data value. This attribute enables data to carry any type of content, whereby format and encoding might differ from that of the chosen event format. + dataschema: + type: string + additionalProperties: true + description: An event filter is a mechanism used to selectively process or handle events based on predefined criteria, such as event type, source, or specific attributes. + correlate: + type: object + additionalProperties: + type: object + properties: + from: + type: string + description: A runtime expression used to extract the correlation value from the filtered event. + expect: + type: string + description: A constant or a runtime expression, if any, used to determine whether or not the extracted correlation value matches expectations. If not set, the first extracted value will be used as the correlation's expectation. + required: [ from ] + description: A correlation is a link between events and data, established by mapping event attributes to specific data attributes, allowing for coordinated processing or handling based on event characteristics. + required: [ with ] + description: An event filter is a mechanism used to selectively process or handle events based on predefined criteria, such as event type, source, or specific attributes. + extension: + type: object + properties: + extend: + type: string + enum: [ call, composite, emit, for, listen, raise, run, set, switch, try, wait, all ] + description: The type of task to extend. + when: + type: string + description: A runtime expression, if any, used to determine whether or not the extension should apply in the specified context. 
+ before: + description: The task(s) to execute before the extended task, if any. + $ref: '#/$defs/taskList' + after: + description: The task(s) to execute after the extended task, if any. + $ref: '#/$defs/taskList' + required: [ extend ] + description: The definition of a an extension. + externalResource: + oneOf: + - type: string + format: uri + - title: ExternalResourceURI + type: object + properties: + uri: + type: string + format: uri + description: The endpoint's URI. + authentication: + $ref: '#/$defs/referenceableAuthenticationPolicy' + description: The authentication policy to use. + name: + type: string + description: The external resource's name, if any. + required: [ uri ] + input: + type: object + properties: + schema: + $ref: '#/$defs/schema' + description: The schema used to describe and validate the input of the workflow or task. + from: + oneOf: + - type: string + - type: object + description: A runtime expression, if any, used to mutate and/or filter the input of the workflow or task. + description: Configures the input of a workflow or task. + output: + type: object + properties: + schema: + $ref: '#/$defs/schema' + description: The schema used to describe and validate the output of the workflow or task. + as: + oneOf: + - type: string + - type: object + description: A runtime expression, if any, used to mutate and/or filter the output of the workflow or task. + description: Configures the output of a workflow or task. + export: + type: object + properties: + schema: + $ref: '#/$defs/schema' + description: The schema used to describe and validate the workflow context. + as: + oneOf: + - type: string + - type: object + description: A runtime expression, if any, used to export the output data to the context. + description: Set the content of the context. + retryPolicy: + type: object + properties: + when: + type: string + description: A runtime expression, if any, used to determine whether or not to retry running the task, in a given context. + exceptWhen: + type: string + description: A runtime expression used to determine whether or not to retry running the task, in a given context. + delay: + $ref: '#/$defs/duration' + description: The duration to wait between retry attempts. + backoff: + type: object + oneOf: + - title: ConstantBackoff + properties: + constant: + type: object + description: The definition of the constant backoff to use, if any. + required: [ constant ] + - title: ExponentialBackOff + properties: + exponential: + type: object + description: The definition of the exponential backoff to use, if any. + required: [ exponential ] + - title: LinearBackoff + properties: + linear: + type: object + description: The definition of the linear backoff to use, if any. + required: [ linear ] + description: The retry duration backoff. + limit: + type: object + properties: + attempt: + type: object + properties: + count: + type: integer + description: The maximum amount of retry attempts, if any. + duration: + $ref: '#/$defs/duration' + description: The maximum duration for each retry attempt. + duration: + $ref: '#/$defs/duration' + description: The duration limit, if any, for all retry attempts. 
+ description: The retry limit, if any + jitter: + type: object + properties: + from: + $ref: '#/$defs/duration' + description: The minimum duration of the jitter range + to: + $ref: '#/$defs/duration' + description: The maximum duration of the jitter range + required: [ from, to ] + description: The parameters, if any, that control the randomness or variability of the delay between retry attempts. + description: Defines a retry policy. + schema: + type: object + properties: + format: + type: string + default: json + description: The schema's format. Defaults to 'json'. The (optional) version of the format can be set using ` + "{format}:{version}" + `. + oneOf: + - title: SchemaInline + properties: + document: + description: The schema's inline definition. + required: [ document ] + - title: SchemaExternal + properties: + resource: + $ref: '#/$defs/externalResource' + description: The schema's external resource. + required: [ resource ] + description: Represents the definition of a schema. + timeout: + type: object + properties: + after: + $ref: '#/$defs/duration' + description: The duration after which to timeout. + required: [ after ] + description: The definition of a timeout. +required: [ document, do ] +` diff --git a/parser/parser.go b/internal/load/load.go similarity index 59% rename from parser/parser.go rename to internal/load/load.go index 7b7ad93..66ff7c3 100644 --- a/parser/parser.go +++ b/internal/load/load.go @@ -1,4 +1,4 @@ -// Copyright 2020 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,10 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -package parser +package load import ( - "encoding/json" "fmt" "os" "path/filepath" @@ -23,8 +22,8 @@ import ( "sigs.k8s.io/yaml" - "github.com/serverlessworkflow/sdk-go/v2/model" - val "github.com/serverlessworkflow/sdk-go/v2/validator" + "github.com/serverlessworkflow/sdk-go/v4/graph" + "github.com/serverlessworkflow/sdk-go/v4/internal/dsl" ) const ( @@ -35,42 +34,48 @@ const ( var supportedExt = []string{extYAML, extYML, extJSON} -// FromYAMLSource parses the given Serverless Workflow YAML source into the Workflow type. -func FromYAMLSource(source []byte) (workflow *model.Workflow, err error) { - var jsonBytes []byte - if jsonBytes, err = yaml.YAMLToJSON(source); err != nil { - return nil, err +func FromFile(path string) (*graph.Node, []byte, error) { + if err := checkFilePath(path); err != nil { + return nil, nil, err } - return FromJSONSource(jsonBytes) -} -// FromJSONSource parses the given Serverless Workflow JSON source into the Workflow type. -func FromJSONSource(source []byte) (workflow *model.Workflow, err error) { - workflow = &model.Workflow{} - if err := json.Unmarshal(source, workflow); err != nil { - return nil, err + fileBytes, err := os.ReadFile(filepath.Clean(path)) + if err != nil { + return nil, nil, err } - ctx := model.NewValidatorContext(workflow) - if err := val.GetValidator().StructCtx(ctx, workflow); err != nil { - return nil, val.WorkflowError(err) + if strings.HasSuffix(path, extYAML) || strings.HasSuffix(path, extYML) { + return FromYAMLSource(fileBytes) } - return workflow, nil + + return FromJSONSource(fileBytes) } -// FromFile parses the given Serverless Workflow file into the Workflow type. 
-func FromFile(path string) (*model.Workflow, error) { - if err := checkFilePath(path); err != nil { - return nil, err +func FromYAMLSource(source []byte) (*graph.Node, []byte, error) { + jsonBytes, err := yaml.YAMLToJSON(source) + if err != nil { + return nil, nil, err } - fileBytes, err := os.ReadFile(filepath.Clean(path)) + return FromJSONSource(jsonBytes) +} + +func FromJSONSource(fileBytes []byte) (*graph.Node, []byte, error) { + root, err := graph.UnmarshalJSON(fileBytes) if err != nil { - return nil, err + return nil, nil, err } - if strings.HasSuffix(path, extYAML) || strings.HasSuffix(path, extYML) { - return FromYAMLSource(fileBytes) + + err = graph.LoadExternalResource(root) + if err != nil { + return nil, nil, err } - return FromJSONSource(fileBytes) + + err = dsl.ApplyDefault(root) + if err != nil { + return nil, nil, err + } + + return root, fileBytes, nil } // checkFilePath verifies if the file exists in the given path and if it's supported by the parser package diff --git a/internal/util/path.go b/internal/util/path.go new file mode 100644 index 0000000..52fa0db --- /dev/null +++ b/internal/util/path.go @@ -0,0 +1,53 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import ( + "fmt" + "os" + "path/filepath" + "sync/atomic" +) + +var defaultIncludePaths atomic.Value + +// IncludePaths will return the search path for non-absolute import file +func IncludePaths() []string { + return defaultIncludePaths.Load().([]string) +} + +// SetIncludePaths will update the search path for non-absolute import file +func SetIncludePaths(paths []string) { + for _, path := range paths { + if !filepath.IsAbs(path) { + panic(fmt.Errorf("%s must be an absolute file path", path)) + } + } + + defaultIncludePaths.Store(paths) +} + +func init() { + // No execute set include path to suport webassembly + if WebAssembly() { + return + } + + wd, err := os.Getwd() + if err != nil { + panic(err) + } + SetIncludePaths([]string{wd}) +} diff --git a/model/doc.go b/internal/util/webassembly.go similarity index 75% rename from model/doc.go rename to internal/util/webassembly.go index 1508354..238b883 100644 --- a/model/doc.go +++ b/internal/util/webassembly.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,7 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. 
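Putting the pieces together, load.FromFile reads the source, builds the graph, resolves external resources, and applies the DSL defaults, returning both the node tree and the raw bytes. A hypothetical sketch of that flow: both packages live under internal/, so this only compiles from inside this module, and the file path is made up.

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/internal/load"
	"github.com/serverlessworkflow/sdk-go/v4/internal/util"
)

func main() {
	// Resolve relative external-resource references against an absolute include path.
	util.SetIncludePaths([]string{"/tmp/workflows"})

	// FromFile returns the graph plus the original bytes, so callers can run
	// schema validation against the raw source afterwards.
	root, source, err := load.FromFile("/tmp/workflows/example.yaml")
	if err != nil {
		panic(err)
	}
	fmt.Println(root.Lookup("document.name").First().GetString(), len(source))
}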
-package model +package util -// +k8s:deepcopy-gen=package -// +k8s:deepcopy-gen:nonpointer-interfaces=true +import ( + "runtime" +) + +func WebAssembly() bool { + return runtime.GOOS == "js" && runtime.GOARCH == "wasm" +} diff --git a/util/unmarshal_benchmark_test.go b/internal/validator/errors.go similarity index 64% rename from util/unmarshal_benchmark_test.go rename to internal/validator/errors.go index 1a81b41..aeb93af 100644 --- a/util/unmarshal_benchmark_test.go +++ b/internal/validator/errors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,20 +12,22 @@ // See the License for the specific language governing permissions and // limitations under the License. -package util +package validator import ( "fmt" - "testing" + + "github.com/xeipuuv/gojsonschema" ) -func Benchmark_IncludePaths_Parallel(b *testing.B) { - b.RunParallel(func(p *testing.PB) { - i := 0 - for p.Next() { - IncludePaths() - SetIncludePaths([]string{fmt.Sprintf("%v", i)}) - i++ - } - }) +type Errors struct { + errors []gojsonschema.ResultError +} + +func (err *Errors) Error() string { + errors := "" + for _, desc := range err.errors { + errors = fmt.Sprintf("%s\n%s", errors, desc) + } + return errors } diff --git a/validator/tags.go b/internal/validator/integrity.go similarity index 75% rename from validator/tags.go rename to internal/validator/integrity.go index e568aba..394e9eb 100644 --- a/validator/tags.go +++ b/internal/validator/integrity.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,7 +14,9 @@ package validator -const ( - // TagISO8601Duration is the validate tag for iso8601 time duration format - TagISO8601Duration = "iso8601duration" -) +import "github.com/serverlessworkflow/sdk-go/v4/graph" + +func integrityValidate(root *graph.Node) error { + + return nil +} diff --git a/internal/validator/validator.go b/internal/validator/validator.go new file mode 100644 index 0000000..eed849c --- /dev/null +++ b/internal/validator/validator.go @@ -0,0 +1,71 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
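The Errors type above concatenates gojsonschema result errors into a single message. A hypothetical sketch of where such a slice of result errors would come from; the schema and document literals are invented, and the wrapper itself is internal to this module.

package main

import (
	"fmt"

	"github.com/xeipuuv/gojsonschema"
)

func main() {
	schema := gojsonschema.NewStringLoader(`{"type": "object", "required": ["document"]}`)
	doc := gojsonschema.NewStringLoader(`{}`)

	result, err := gojsonschema.Validate(schema, doc)
	if err != nil {
		panic(err)
	}
	if !result.Valid() {
		// A slice like this is what the validator's Errors wrapper aggregates.
		for _, e := range result.Errors() {
			fmt.Println(e)
		}
	}
}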
+ +package validator + +import ( + "bytes" + "log" + + "github.com/santhosh-tekuri/jsonschema/v6" + "sigs.k8s.io/yaml" + + "github.com/serverlessworkflow/sdk-go/v4/graph" + "github.com/serverlessworkflow/sdk-go/v4/internal/dsl" +) + +var schema *jsonschema.Schema + +func Valid(root *graph.Node, source []byte) error { + inst, err := jsonschema.UnmarshalJSON(bytes.NewReader(source)) + if err != nil { + return err + } + + err = schema.Validate(inst) + if err != nil { + return err + } + + err = integrityValidate(root) + if err != nil { + return err + } + + return nil +} + +func init() { + var err error + + jsonBytes, err := yaml.YAMLToJSON([]byte(dsl.DSLSpec)) + if err != nil { + log.Fatal(err) + } + readerJsonSchema, err := jsonschema.UnmarshalJSON(bytes.NewReader(jsonBytes)) + if err != nil { + log.Fatal(err) + } + + c := jsonschema.NewCompiler() + err = c.AddResource("dslspec.json", readerJsonSchema) + if err != nil { + log.Fatal(err) + } + + schema, err = c.Compile("dslspec.json") + if err != nil { + log.Fatal(err) + } +} diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/k8s_workflow_integration.go index 0f929c0..3eed4fa 100644 --- a/kubernetes/k8s_workflow_integration.go +++ b/kubernetes/k8s_workflow_integration.go @@ -15,7 +15,6 @@ package kubernetes import ( - "github.com/serverlessworkflow/sdk-go/v2/model" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" ) @@ -37,7 +36,7 @@ import ( // ServerlessWorkflowSpec defines a base API for integration test with operator-sdk type ServerlessWorkflowSpec struct { - model.Workflow `json:",inline"` + // model.Workflow `json:",inline"` } // ServerlessWorkflow ... diff --git a/model/action.go b/model/action.go deleted file mode 100644 index 7bc4fba..0000000 --- a/model/action.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// Action specify invocations of services or other workflows during workflow execution. -// +builder-gen:new-call=ApplyDefault -type Action struct { - // Defines Unique action identifier. - // +optional - ID string `json:"id,omitempty"` - // Defines Unique action name. - // +optional - Name string `json:"name,omitempty"` - // References a reusable function definition. - // +optional - FunctionRef *FunctionRef `json:"functionRef,omitempty"` - // References a 'trigger' and 'result' reusable event definitions. - // +optional - EventRef *EventRef `json:"eventRef,omitempty"` - // References a workflow to be invoked. - // +optional - SubFlowRef *WorkflowRef `json:"subFlowRef,omitempty"` - // Defines time period workflow execution should sleep before / after function execution. - // +optional - Sleep *Sleep `json:"sleep,omitempty"` - // References a defined workflow retry definition. If not defined uses the default runtime retry definition. 
- // +optional - RetryRef string `json:"retryRef,omitempty"` - // List of unique references to defined workflow errors for which the action should not be retried. - // Used only when `autoRetries` is set to `true` - // +optional - NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` - // List of unique references to defined workflow errors for which the action should be retried. - // Used only when `autoRetries` is set to `false` - // +optional - RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` - // Filter the state data to select only the data that can be used within function definition arguments - // using its fromStateData property. Filter the action results to select only the result data that should - // be added/merged back into the state data using its results property. Select the part of state data which - // the action data results should be added/merged to using the toStateData property. - // +optional - ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` - // Expression, if defined, must evaluate to true for this action to be performed. If false, action is disregarded. - // +optional - Condition string `json:"condition,omitempty"` -} - -type actionUnmarshal Action - -// UnmarshalJSON implements json.Unmarshaler -func (a *Action) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("action", data, (*actionUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action -func (a *Action) ApplyDefault() { - a.ActionDataFilter.ApplyDefault() -} - -// FunctionRef defines the reference to a reusable function definition -// +builder-gen:new-call=ApplyDefault -type FunctionRef struct { - // Name of the referenced function. - // +kubebuilder:validation:Required - RefName string `json:"refName" validate:"required"` - // Arguments (inputs) to be passed to the referenced function - // +optional - // TODO: validate it as required if function type is graphql - Arguments map[string]Object `json:"arguments,omitempty"` - // Used if function type is graphql. String containing a valid GraphQL selection set. - // TODO: validate it as required if function type is graphql - // +optional - SelectionSet string `json:"selectionSet,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type functionRefUnmarshal FunctionRef - -// UnmarshalJSON implements json.Unmarshaler -func (f *FunctionRef) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("functionRef", data, &f.RefName, (*functionRefUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function Ref -func (f *FunctionRef) ApplyDefault() { - f.Invoke = InvokeKindSync -} - -// Sleep defines time periods workflow execution should sleep before & after function execution -type Sleep struct { - // Defines amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. - // Does not apply if 'eventRef' is defined. - // +optional - Before string `json:"before,omitempty" validate:"omitempty,iso8601duration"` - // Defines amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. - // Does not apply if 'eventRef' is defined. 
- // +optional - After string `json:"after,omitempty" validate:"omitempty,iso8601duration"` -} - -type sleepUnmarshal Sleep - -// UnmarshalJSON implements json.Unmarshaler -func (s *Sleep) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("sleep", data, (*sleepUnmarshal)(s)) -} diff --git a/model/action_data_filter.go b/model/action_data_filter.go deleted file mode 100644 index e929f6b..0000000 --- a/model/action_data_filter.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// ActionDataFilter used to filter action data results. -// +optional -// +builder-gen:new-call=ApplyDefault -type ActionDataFilter struct { - // Workflow expression that filters state data that can be used by the action. - // +optional - FromStateData string `json:"fromStateData,omitempty"` - // If set to false, action data results are not added/merged to state data. In this case 'results' - // and 'toStateData' should be ignored. Default is true. - // +optional - UseResults bool `json:"useResults,omitempty"` - // Workflow expression that filters the actions data results. - // +optional - Results string `json:"results,omitempty"` - // Workflow expression that selects a state data element to which the action results should be - // added/merged into. If not specified denotes the top-level state data element. - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type actionDataFilterUnmarshal ActionDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (a *ActionDataFilter) UnmarshalJSON(data []byte) error { - a.ApplyDefault() - return util.UnmarshalObject("actionDataFilter", data, (*actionDataFilterUnmarshal)(a)) -} - -// ApplyDefault set the default values for Action Data Filter -func (a *ActionDataFilter) ApplyDefault() { - a.UseResults = true -} diff --git a/model/action_data_filter_test.go b/model/action_data_filter_test.go deleted file mode 100644 index cae511a..0000000 --- a/model/action_data_filter_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestActionDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ActionDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: true, - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"fromStateData": "1", "results": "2", "toStateData": "3", "useResults": false}`, - expect: ActionDataFilter{ - FromStateData: "1", - Results: "2", - ToStateData: "3", - UseResults: false, - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: ActionDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"fromStateData": 1, "results": "2", "toStateData": "3"}`, - expect: ActionDataFilter{}, - err: `actionDataFilter.fromStateData must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ActionDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_test.go b/model/action_test.go deleted file mode 100644 index 55c399d..0000000 --- a/model/action_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFunctionRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect FunctionRef - err string - } - - testCases := []testCase{ - { - desp: "invalid object refName", - data: `{"refName": 1}`, - expect: FunctionRef{}, - err: "functionRef.refName must be string", - }, - { - desp: "object with refName", - data: `{"refName": "function name"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "object with refName and Invoke", - data: `{"refName": "function name", "invoke": "async"}`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "refName string", - data: `"function name"`, - expect: FunctionRef{ - RefName: "function name", - Invoke: InvokeKindSync, - }, - err: ``, - }, - } - - for _, tc := range testCases[:1] { - t.Run(tc.desp, func(t *testing.T) { - var v FunctionRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/action_validator.go b/model/action_validator.go deleted file mode 100644 index 384469b..0000000 --- a/model/action_validator.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(actionStructLevelValidationCtx), Action{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(functionRefStructLevelValidation), FunctionRef{}) -} - -func actionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - action := structLevel.Current().Interface().(Action) - - if action.FunctionRef == nil && action.EventRef == nil && action.SubFlowRef == nil { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", "required_without", "") - return - } - - values := []bool{ - action.FunctionRef != nil, - action.EventRef != nil, - action.SubFlowRef != nil, - } - - if validationNotExclusiveParamters(values) { - structLevel.ReportError(action.FunctionRef, "FunctionRef", "FunctionRef", val.TagExclusive, "") - structLevel.ReportError(action.EventRef, "EventRef", "EventRef", val.TagExclusive, "") - structLevel.ReportError(action.SubFlowRef, "SubFlowRef", "SubFlowRef", val.TagExclusive, "") - } - - if action.RetryRef != "" && !ctx.ExistRetry(action.RetryRef) { - structLevel.ReportError(action.RetryRef, "RetryRef", "RetryRef", val.TagExists, "") - } -} - -func functionRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - functionRef := structLevel.Current().Interface().(FunctionRef) - if !ctx.ExistFunction(functionRef.RefName) { - structLevel.ReportError(functionRef.RefName, "RefName", "RefName", val.TagExists, functionRef.RefName) - } -} diff --git a/model/action_validator_test.go b/model/action_validator_test.go deleted file mode 100644 index 5445f7b..0000000 --- a/model/action_validator_test.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildActionByOperationState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.OperationState.Actions = append(state.OperationState.Actions, action) - return &state.OperationState.Actions[len(state.OperationState.Actions)-1] -} - -func buildActionByForEachState(state *State, name string) *Action { - action := Action{ - Name: name, - } - - state.ForEachState.Actions = append(state.ForEachState.Actions, action) - return &state.ForEachState.Actions[len(state.ForEachState.Actions)-1] -} - -func buildActionByBranch(branch *Branch, name string) *Action { - action := Action{ - Name: name, - } - - branch.Actions = append(branch.Actions, action) - return &branch.Actions[len(branch.Actions)-1] -} - -func buildFunctionRef(workflow *Workflow, action *Action, name string) (*FunctionRef, *Function) { - function := Function{ - Name: name, - Operation: "http://function/function_name", - Type: FunctionTypeREST, - } - - functionRef := FunctionRef{ - RefName: name, - Invoke: InvokeKindSync, - } - action.FunctionRef = &functionRef - - workflow.Functions = append(workflow.Functions, function) - return &functionRef, &function -} - -func buildRetryRef(workflow *Workflow, action *Action, name string) { - retry := Retry{ - Name: name, - } - - workflow.Retries = append(workflow.Retries, retry) - action.RetryRef = name -} - -func buildSleep(action *Action) *Sleep { - action.Sleep = &Sleep{ - Before: "PT5S", - After: "PT5S", - } - return action.Sleep -} - -func TestActionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "require_without", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef = nil - return *model - }, - Err: `workflow.states[0].actions[0].functionRef required when "eventRef" or "subFlowRef" is not defined`, - }, - { - Desp: "exclude", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventRef(model, &model.States[0].OperationState.Actions[0], "event 1", "event2") - return *model - }, - Err: `workflow.states[0].actions[0].functionRef exclusive -workflow.states[0].actions[0].eventRef exclusive -workflow.states[0].actions[0].subFlowRef exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestFunctionRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := 
baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].FunctionRef.RefName = "invalid function" - return *model - }, - Err: `workflow.states[0].actions[0].functionRef.refName don't exist "invalid function"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildSleep(action1) - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "" - model.States[0].OperationState.Actions[0].Sleep.After = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].Sleep.Before = "P5S" - model.States[0].OperationState.Actions[0].Sleep.After = "P5S" - return *model - }, - Err: `workflow.states[0].actions[0].sleep.before invalid iso8601 duration "P5S" -workflow.states[0].actions[0].sleep.after invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} diff --git a/model/auth.go b/model/auth.go deleted file mode 100644 index 6632265..0000000 --- a/model/auth.go +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// AuthType can be "basic", "bearer", or "oauth2". Default is "basic" -type AuthType string - -func (i AuthType) KindValues() []string { - return []string{ - string(AuthTypeBasic), - string(AuthTypeBearer), - string(AuthTypeOAuth2), - } -} - -func (i AuthType) String() string { - return string(i) -} - -const ( - // AuthTypeBasic ... - AuthTypeBasic AuthType = "basic" - // AuthTypeBearer ... - AuthTypeBearer AuthType = "bearer" - // AuthTypeOAuth2 ... - AuthTypeOAuth2 AuthType = "oauth2" -) - -// GrantType ... -type GrantType string - -func (i GrantType) KindValues() []string { - return []string{ - string(GrantTypePassword), - string(GrantTypeClientCredentials), - string(GrantTypeTokenExchange), - } -} - -func (i GrantType) String() string { - return string(i) -} - -const ( - // GrantTypePassword ... - GrantTypePassword GrantType = "password" - // GrantTypeClientCredentials ... - GrantTypeClientCredentials GrantType = "clientCredentials" - // GrantTypeTokenExchange ... 
- GrantTypeTokenExchange GrantType = "tokenExchange" -) - -// Auth definitions can be used to define authentication information that should be applied to resources -// defined in the operation property of function definitions. It is not used as authentication information -// for the function invocation, but just to access the resource containing the function invocation information. -type Auth struct { - // Unique auth definition name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Auth scheme, can be "basic", "bearer", or "oauth2". Default is "basic" - // +kubebuilder:validation:Enum=basic;bearer;oauth2 - // +kubebuilder:default=basic - // +kubebuilder:validation:Required - Scheme AuthType `json:"scheme" validate:"required,oneofkind"` - // Auth scheme properties. Can be one of "Basic properties definition", "Bearer properties definition", - // or "OAuth2 properties definition" - // +kubebuilder:validation:Required - Properties AuthProperties `json:"properties" validate:"required"` -} - -type authUnmarshal Auth - -// UnmarshalJSON Auth definition -func (a *Auth) UnmarshalJSON(data []byte) error { - authTmp := struct { - authUnmarshal - PropertiesRaw json.RawMessage `json:"properties"` - }{} - - err := util.UnmarshalObjectOrFile("auth", data, &authTmp) - if err != nil { - return err - } - - *a = Auth(authTmp.authUnmarshal) - if len(a.Scheme) == 0 { - a.Scheme = AuthTypeBasic - } - - switch a.Scheme { - case AuthTypeBasic: - a.Properties.Basic = &BasicAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Basic) - case AuthTypeBearer: - a.Properties.Bearer = &BearerAuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.Bearer) - case AuthTypeOAuth2: - a.Properties.OAuth2 = &OAuth2AuthProperties{} - return util.UnmarshalObject("properties", authTmp.PropertiesRaw, a.Properties.OAuth2) - default: - return fmt.Errorf("failed to parse auth properties") - } -} - -func (a *Auth) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - Name string `json:"name" validate:"required"` - Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` - Properties AuthProperties `json:"properties" validate:"required"` - }{ - Name: a.Name, - Scheme: a.Scheme, - Properties: a.Properties, - }) - if err != nil { - fmt.Println(err) - } - st := strings.Replace(string(custom), "null,", "", 1) - st = strings.Replace(st, "\"Basic\":", "", 1) - st = strings.Replace(st, "\"Oauth2\":", "", 1) - st = strings.Replace(st, "\"Bearer\":", "", 1) - st = strings.Replace(st, "{{", "{", 1) - st = strings.TrimSuffix(st, "}") - return []byte(st), nil -} - -// AuthProperties ... -type AuthProperties struct { - Basic *BasicAuthProperties `json:",omitempty"` - Bearer *BearerAuthProperties `json:",omitempty"` - OAuth2 *OAuth2AuthProperties `json:",omitempty"` -} - -// BasicAuthProperties Basic Auth Info -type BasicAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Username String or a workflow expression. Contains the username - // +kubebuilder:validation:Required - Username string `json:"username" validate:"required"` - // Password String or a workflow expression. 
Contains the user password - // +kubebuilder:validation:Required - Password string `json:"password" validate:"required"` -} - -// BearerAuthProperties Bearer auth information -type BearerAuthProperties struct { - Common `json:",inline"` - // Secret Expression referencing a workflow secret that contains all needed auth info - // +optional - Secret string `json:"secret,omitempty"` - // Token String or a workflow expression. Contains the token - // +kubebuilder:validation:Required - Token string `json:"token" validate:"required"` -} - -// OAuth2AuthProperties OAuth2 information -type OAuth2AuthProperties struct { - Common `json:",inline"` - // Expression referencing a workflow secret that contains all needed auth info. - // +optional - Secret string `json:"secret,omitempty"` - // String or a workflow expression. Contains the authority information. - // +optional - Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` - // Defines the grant type. Can be "password", "clientCredentials", or "tokenExchange" - // +kubebuilder:validation:Enum=password;clientCredentials;tokenExchange - // +kubebuilder:validation:Required - GrantType GrantType `json:"grantType" validate:"required,oneofkind"` - // String or a workflow expression. Contains the client identifier. - // +kubebuilder:validation:Required - ClientID string `json:"clientId" validate:"required"` - // Workflow secret or a workflow expression. Contains the client secret. - // +optional - ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 scopes. - // +optional - Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner'. - // +optional - Username string `json:"username,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner'. - // +optional - Password string `json:"password,omitempty" validate:"omitempty,min=1"` - // Array containing strings or workflow expressions. Contains the OAuth2 audiences. - // +optional - Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the subject token. - // +optional - SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested subject. - // +optional - RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` - // String or a workflow expression. Contains the requested issuer. - // +optional - RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` -} - -// TODO: use reflection to unmarshal the keys and think on a generic approach to handle them diff --git a/model/auth_test.go b/model/auth_test.go deleted file mode 100644 index 60602a2..0000000 --- a/model/auth_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestUnmarshalJSONMultipleAuthProperties(t *testing.T) { - t.Run("BearerAuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "bearer", - "properties": { - "token": "token1" - } - }` - a2JSON := `{ - "name": "a2", - "scheme": "bearer", - "properties": { - "token": "token2" - } - }` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.Bearer - a2Properties := a2.Properties.Bearer - - assert.Equal(t, "token1", a1Properties.Token) - assert.Equal(t, "token2", a2Properties.Token) - assert.NotEqual(t, a1Properties, a2Properties) - }) - - t.Run("OAuth2AuthProperties", func(t *testing.T) { - a1JSON := `{ - "name": "a1", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret1" - } -}` - - a2JSON := `{ - "name": "a2", - "scheme": "oauth2", - "properties": { - "clientSecret": "secret2" - } -}` - - var a1 Auth - err := json.Unmarshal([]byte(a1JSON), &a1) - assert.NoError(t, err) - - var a2 Auth - err = json.Unmarshal([]byte(a2JSON), &a2) - assert.NoError(t, err) - - a1Properties := a1.Properties.OAuth2 - a2Properties := a2.Properties.OAuth2 - - assert.Equal(t, "secret1", a1Properties.ClientSecret) - assert.Equal(t, "secret2", a2Properties.ClientSecret) - assert.NotEqual(t, a1Properties, a2Properties) - }) -} diff --git a/model/auth_validator_test.go b/model/auth_validator_test.go deleted file mode 100644 index e2ce55d..0000000 --- a/model/auth_validator_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildAuth(workflow *Workflow, name string) *Auth { - auth := Auth{ - Name: name, - Scheme: AuthTypeBasic, - } - workflow.Auth = append(workflow.Auth, auth) - return &workflow.Auth[len(workflow.Auth)-1] -} - -func buildBasicAuthProperties(auth *Auth) *BasicAuthProperties { - auth.Scheme = AuthTypeBasic - auth.Properties = AuthProperties{ - Basic: &BasicAuthProperties{ - Username: "username", - Password: "password", - }, - } - - return auth.Properties.Basic -} - -func buildOAuth2AuthProperties(auth *Auth) *OAuth2AuthProperties { - auth.Scheme = AuthTypeOAuth2 - auth.Properties = AuthProperties{ - OAuth2: &OAuth2AuthProperties{ - ClientID: "clientId", - GrantType: GrantTypePassword, - }, - } - - return auth.Properties.OAuth2 -} - -func buildBearerAuthProperties(auth *Auth) *BearerAuthProperties { - auth.Scheme = AuthTypeBearer - auth.Properties = AuthProperties{ - Bearer: &BearerAuthProperties{ - Token: "token", - }, - } - - return auth.Properties.Bearer -} - -func TestAuthStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Name = "" - return *model - }, - Err: `workflow.auth[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth = append(model.Auth, model.Auth[0]) - return *model - }, - Err: `workflow.auth has duplicate "name"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestBasicAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBasicAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Basic.Username = "" - model.Auth[0].Properties.Basic.Password = "" - return *model - }, - Err: `workflow.auth[0].properties.basic.username is required -workflow.auth[0].properties.basic.password is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBearerAuthPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildBearerAuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.Bearer.Token = "" - return *model - }, - Err: `workflow.auth[0].properties.bearer.token is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOAuth2AuthPropertiesPropertiesStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - auth := buildAuth(baseWorkflow, "auth 1") - buildOAuth2AuthProperties(auth) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = "" - model.Auth[0].Properties.OAuth2.ClientID = "" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType is required -workflow.auth[0].properties.oAuth2.clientID is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Auth[0].Properties.OAuth2.GrantType = GrantTypePassword + "invalid" - return *model - }, - Err: `workflow.auth[0].properties.oAuth2.grantType need by one of [password clientCredentials tokenExchange]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/callback_state.go b/model/callback_state.go deleted file mode 100644 index 1dadcb6..0000000 --- a/model/callback_state.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// CallbackState executes a function and waits for callback event that indicates completion of the task. -type CallbackState struct { - // Defines the action to be executed. - // +kubebuilder:validation:Required - Action Action `json:"action"` - // References a unique callback event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Time period to wait for incoming events (ISO 8601 format) - // +optional - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - // Event data filter definition. 
- // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` -} - -func (c *CallbackState) MarshalJSON() ([]byte, error) { - type Alias CallbackState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *CallbackStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(c), - Timeouts: c.Timeouts, - }) - return custom, err -} - -// CallbackStateTimeout defines timeout settings for callback state -type CallbackStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/callback_state_validator_test.go b/model/callback_state_validator_test.go deleted file mode 100644 index a89cea9..0000000 --- a/model/callback_state_validator_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildCallbackState(workflow *Workflow, name, eventRef string) *State { - consumeEvent := Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, consumeEvent) - - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeCallback, - }, - CallbackState: &CallbackState{ - EventRef: eventRef, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func buildCallbackStateTimeout(callbackState *CallbackState) *CallbackStateTimeout { - callbackState.Timeouts = &CallbackStateTimeout{} - return callbackState.Timeouts -} - -func TestCallbackStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.EventRef = "" - return *model - }, - Err: `workflow.states[0].callbackState.eventRef is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestCallbackStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: `success`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: `omitempty`, - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "" - model.States[0].CallbackState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].CallbackState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].CallbackState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].callbackState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].callbackState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/delay_state.go b/model/delay_state.go deleted file mode 100644 index 3227e74..0000000 --- a/model/delay_state.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "encoding/json" - -// DelayState Causes the workflow execution to delay for a specified duration -type DelayState struct { - // Amount of time (ISO 8601 format) to delay - // +kubebuilder:validation:Required - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` -} - -func (a *DelayState) MarshalJSON() ([]byte, error) { - custom, err := json.Marshal(&struct { - TimeDelay string `json:"timeDelay" validate:"required,iso8601duration"` - }{ - TimeDelay: a.TimeDelay, - }) - return custom, err -} diff --git a/model/delay_state_test.go b/model/delay_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/delay_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/delay_state_validator_test.go b/model/delay_state_validator_test.go deleted file mode 100644 index aed36c5..0000000 --- a/model/delay_state_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildDelayState(workflow *Workflow, name, timeDelay string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeDelay, - }, - DelayState: &DelayState{ - TimeDelay: timeDelay, - }, - } - workflow.States = append(workflow.States, state) - - return &workflow.States[len(workflow.States)-1] -} - -func TestDelayStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - delayState := buildDelayState(baseWorkflow, "start state", "PT5S") - buildEndByState(delayState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].DelayState.TimeDelay = "P5S" - return *model - }, - Err: `workflow.states[0].delayState.timeDelay invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event.go b/model/event.go deleted file mode 100644 index 96069bf..0000000 --- a/model/event.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventKind defines this event as either `consumed` or `produced` -type EventKind string - -func (i EventKind) KindValues() []string { - return []string{ - string(EventKindConsumed), - string(EventKindProduced), - } -} - -func (i EventKind) String() string { - return string(i) -} - -const ( - // EventKindConsumed means the event continuation of workflow instance execution - EventKindConsumed EventKind = "consumed" - - // EventKindProduced means the event was created during workflow instance execution - EventKindProduced EventKind = "produced" -) - -// Event used to define events and their correlations -// +builder-gen:new-call=ApplyDefault -type Event struct { - Common `json:",inline"` - // Unique event name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // CloudEvent source. - // +optional - Source string `json:"source,omitempty"` - // CloudEvent type. - // +kubebuilder:validation:Required - Type string `json:"type" validate:"required"` - // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Defaults to `consumed`. - // +kubebuilder:validation:Enum=consumed;produced - // +kubebuilder:default=consumed - Kind EventKind `json:"kind,omitempty" validate:"required,oneofkind"` - // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload - // and context attributes should be accessible. Defaults to true. 
- // +kubebuilder:default=true - // +optional - DataOnly bool `json:"dataOnly,omitempty"` - // Define event correlation rules for this event. Only used for consumed events. - // +optional - Correlation []Correlation `json:"correlation,omitempty" validate:"dive"` -} - -type eventUnmarshal Event - -// UnmarshalJSON unmarshal Event object from json bytes -func (e *Event) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("event", data, (*eventUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event -func (e *Event) ApplyDefault() { - e.DataOnly = true - e.Kind = EventKindConsumed -} - -// Correlation define event correlation rules for an event. Only used for `consumed` events -type Correlation struct { - // CloudEvent Extension Context Attribute name - // +kubebuilder:validation:Required - ContextAttributeName string `json:"contextAttributeName" validate:"required"` - // CloudEvent Extension Context Attribute value - // +optional - ContextAttributeValue string `json:"contextAttributeValue,omitempty"` -} - -// EventRef defining invocation of a function via event -// +builder-gen:new-call=ApplyDefault -type EventRef struct { - // Reference to the unique name of a 'produced' event definition, - // +kubebuilder:validation:Required - TriggerEventRef string `json:"triggerEventRef" validate:"required"` - // Reference to the unique name of a 'consumed' event definition - // +kubebuilder:validation:Required - ResultEventRef string `json:"resultEventRef" validate:"required"` - // Maximum amount of time (ISO 8601 format) to wait for the result event. If not defined it be set to the - // actionExecutionTimeout - // +optional - ResultEventTimeout string `json:"resultEventTimeout,omitempty" validate:"omitempty,iso8601duration"` - // If string type, an expression which selects parts of the states data output to become the data (payload) - // of the event referenced by triggerEventRef. If object type, a custom object to become the data (payload) - // of the event referenced by triggerEventRef. - // +optional - Data *Object `json:"data,omitempty"` - // Add additional extension context attributes to the produced event. - // +optional - ContextAttributes map[string]Object `json:"contextAttributes,omitempty"` - // Specifies if the function should be invoked sync or async. Default is sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` -} - -type eventRefUnmarshal EventRef - -// UnmarshalJSON implements json.Unmarshaler -func (e *EventRef) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventRef", data, (*eventRefUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event Ref -func (e *EventRef) ApplyDefault() { - e.Invoke = InvokeKindSync -} diff --git a/model/event_data_filter.go b/model/event_data_filter.go deleted file mode 100644 index 1db5bbf..0000000 --- a/model/event_data_filter.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// EventDataFilter used to filter consumed event payloads. -// +builder-gen:new-call=ApplyDefault -type EventDataFilter struct { - // If set to false, event payload is not added/merged to state data. In this case 'data' and 'toStateData' - // should be ignored. Default is true. - // +optional - UseData bool `json:"useData,omitempty"` - // Workflow expression that filters of the event data (payload). - // +optional - Data string `json:"data,omitempty"` - // Workflow expression that selects a state data element to which the action results should be added/merged into. - // If not specified denotes the top-level state data element - // +optional - ToStateData string `json:"toStateData,omitempty"` -} - -type eventDataFilterUnmarshal EventDataFilter - -// UnmarshalJSON implements json.Unmarshaler -func (f *EventDataFilter) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("eventDataFilter", data, (*eventDataFilterUnmarshal)(f)) -} - -// ApplyDefault set the default values for Event Data Filter -func (f *EventDataFilter) ApplyDefault() { - f.UseData = true -} diff --git a/model/event_data_filter_test.go b/model/event_data_filter_test.go deleted file mode 100644 index e4bf979..0000000 --- a/model/event_data_filter_test.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventDataFilterUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventDataFilter - err string - } - testCases := []testCase{ - { - desp: "normal test", - data: `{"data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: true, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "add UseData to false", - data: `{"UseData": false, "data": "1", "toStateData": "2"}`, - expect: EventDataFilter{ - UseData: false, - Data: "1", - ToStateData: "2", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: EventDataFilter{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"data": 1, "toStateData": "2"}`, - expect: EventDataFilter{}, - err: `eventDataFilter.data must be string`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventDataFilter - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_state.go b/model/event_state.go deleted file mode 100644 index 39bd590..0000000 --- a/model/event_state.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// EventState await one or more events and perform actions when they are received. If defined as the -// workflow starting state, the event state definition controls when the workflow instances should be created. -// +builder-gen:new-call=ApplyDefault -type EventState struct { - // TODO: EventState doesn't have usedForCompensation field. - - // If true consuming one of the defined events causes its associated actions to be performed. If false all - // the defined events must be consumed in order for actions to be performed. Defaults to true. - // +kubebuilder:default=true - // +optional - Exclusive bool `json:"exclusive,omitempty"` - // Define the events to be consumed and optional actions to be performed. - // +kubebuilder:validation:MinItems=1 - OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // State specific timeouts. 
- // +optional - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` -} - -func (e *EventState) MarshalJSON() ([]byte, error) { - type Alias EventState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *EventStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(e), - Timeouts: e.Timeouts, - }) - return custom, err -} - -type eventStateUnmarshal EventState - -// UnmarshalJSON unmarshal EventState object from json bytes -func (e *EventState) UnmarshalJSON(data []byte) error { - e.ApplyDefault() - return util.UnmarshalObject("eventState", data, (*eventStateUnmarshal)(e)) -} - -// ApplyDefault set the default values for Event State -func (e *EventState) ApplyDefault() { - e.Exclusive = true -} - -// OnEvents define which actions are be performed for the one or more events. -// +builder-gen:new-call=ApplyDefault -type OnEvents struct { - // References one or more unique event names in the defined workflow events. - // +kubebuilder:validation:MinItems=1 - EventRefs []string `json:"eventRefs" validate:"required,min=1"` - // Should actions be performed sequentially or in parallel. Default is sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed if expression matches - // +optional - Actions []Action `json:"actions,omitempty" validate:"dive"` - // eventDataFilter defines the callback event data filter definition - // +optional - EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` -} - -type onEventsUnmarshal OnEvents - -// UnmarshalJSON unmarshal OnEvents object from json bytes -func (o *OnEvents) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("onEvents", data, (*onEventsUnmarshal)(o)) -} - -// ApplyDefault set the default values for On Events -func (o *OnEvents) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// EventStateTimeout defines timeout settings for event state -type EventStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Default timeout for consuming defined events (ISO 8601 duration format) - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/event_state_test.go b/model/event_state_test.go deleted file mode 100644 index 348aaea..0000000 --- a/model/event_state_test.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect State - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"name": "1", "type": "event", "exclusive": false, "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: false, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - { - desp: "default exclusive", - data: `{"name": "1", "type": "event", "onEvents": [{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}], "timeouts": {"actionExecTimeout": "PT5M", "eventTimeout": "PT5M", "stateExecTimeout": "PT5M"}}`, - expect: State{ - BaseState: BaseState{ - Name: "1", - Type: StateTypeEvent, - }, - EventState: &EventState{ - Exclusive: true, - OnEvents: []OnEvents{ - { - EventRefs: []string{"E1", "E2"}, - ActionMode: "parallel", - }, - }, - Timeouts: &EventStateTimeout{ - EventTimeout: "PT5M", - ActionExecTimeout: "PT5M", - StateExecTimeout: &StateExecTimeout{ - Total: "PT5M", - }, - }, - }, - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := State{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestOnEventsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OnEvents - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "default action mode", - data: `{"eventRefs": ["E1", "E2"]}`, - expect: OnEvents{ - EventRefs: []string{"E1", "E2"}, - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `"eventRefs": ["E1", "E2"], "actionMode": "parallel"}`, - expect: OnEvents{}, - err: `invalid character ':' after top-level value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OnEvents{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_state_validator.go b/model/event_state_validator.go deleted file mode 100644 index d4f2f40..0000000 --- a/model/event_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStateStructLevelValidationCtx), EventState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onEventsStructLevelValidationCtx), OnEvents{}) -} - -func eventStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - // EventRefs -} - -func onEventsStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - onEvent := structLevel.Current().Interface().(OnEvents) - for _, eventRef := range onEvent.EventRefs { - if eventRef != "" && !ctx.ExistEvent(eventRef) { - structLevel.ReportError(eventRef, "eventRefs", "EventRefs", val.TagExists, "") - } - } -} diff --git a/model/event_state_validator_test.go b/model/event_state_validator_test.go deleted file mode 100644 index ea7d319..0000000 --- a/model/event_state_validator_test.go +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "testing" - -func buildEventState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeEvent, - }, - EventState: &EventState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOnEvents(workflow *Workflow, state *State, name string) *OnEvents { - event := Event{ - Name: name, - Type: "type", - Kind: EventKindProduced, - } - workflow.Events = append(workflow.Events, event) - - state.EventState.OnEvents = append(state.EventState.OnEvents, OnEvents{ - EventRefs: []string{event.Name}, - ActionMode: ActionModeParallel, - }) - - return &state.EventState.OnEvents[len(state.EventState.OnEvents)-1] -} - -func buildEventStateTimeout(state *State) *EventStateTimeout { - state.EventState.Timeouts = &EventStateTimeout{ - ActionExecTimeout: "PT5S", - EventTimeout: "PT5S", - } - return state.EventState.Timeouts -} - -func TestEventStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = nil - return *model - }, - Err: `workflow.states[0].eventState.onEvents is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents = []OnEvents{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents must have the minimum 1`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestOnEventsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{"event not found"} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = nil - model.States[0].EventState.OnEvents[0].ActionMode = "" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs is required -workflow.states[0].eventState.onEvents[0].actionMode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].EventRefs = []string{} - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].eventRefs must have the minimum 1`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.OnEvents[0].ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].eventState.onEvents[0].actionMode need by one of [sequential parallel]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func 
TestEventStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - eventState := buildEventState(baseWorkflow, "start state") - buildEventStateTimeout(eventState) - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "" - model.States[0].EventState.Timeouts.EventTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].EventState.Timeouts.ActionExecTimeout = "P5S" - model.States[0].EventState.Timeouts.EventTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].eventState.timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].eventState.timeouts.eventTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/event_test.go b/model/event_test.go deleted file mode 100644 index f557c61..0000000 --- a/model/event_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEventRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect EventRef - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"invoke": "async"}`, - expect: EventRef{ - Invoke: InvokeKindAsync, - }, - err: ``, - }, - { - desp: "invoke unset", - data: `{}`, - expect: EventRef{ - Invoke: InvokeKindSync, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"invoke": 1}`, - expect: EventRef{}, - err: `eventRef.invoke must be sync or async`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v EventRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestEventUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Event - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"dataOnly": false, "kind": "produced"}`, - expect: Event{ - DataOnly: false, - Kind: EventKindProduced, - }, - err: ``, - }, - { - desp: "optional field dataOnly & kind unset", - data: `{}`, - expect: Event{ - DataOnly: true, - Kind: EventKindConsumed, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"dataOnly": "false", "kind": "produced"}`, - expect: Event{}, - err: `event.dataOnly must be bool`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Event - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/event_validator.go b/model/event_validator.go deleted file mode 100644 index 7b4daa9..0000000 --- a/model/event_validator.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventStructLevelValidation), Event{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventRefStructLevelValidation), EventRef{}) -} - -// eventStructLevelValidation custom validator for event kind consumed -func eventStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { -} - -func eventRefStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - model := structLevel.Current().Interface().(EventRef) - if model.TriggerEventRef != "" && !ctx.ExistEvent(model.TriggerEventRef) { - structLevel.ReportError(model.TriggerEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } - if model.ResultEventRef != "" && !ctx.ExistEvent(model.ResultEventRef) { - structLevel.ReportError(model.ResultEventRef, "triggerEventRef", "TriggerEventRef", val.TagExists, "") - } -} diff --git a/model/event_validator_test.go b/model/event_validator_test.go deleted file mode 100644 index 80340b0..0000000 --- a/model/event_validator_test.go +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildEventRef(workflow *Workflow, action *Action, triggerEvent, resultEvent string) *EventRef { - produceEvent := Event{ - Name: triggerEvent, - Type: "event type", - Kind: EventKindProduced, - } - - consumeEvent := Event{ - Name: resultEvent, - Type: "event type", - Kind: EventKindProduced, - } - - workflow.Events = append(workflow.Events, produceEvent) - workflow.Events = append(workflow.Events, consumeEvent) - - eventRef := &EventRef{ - TriggerEventRef: triggerEvent, - ResultEventRef: resultEvent, - Invoke: InvokeKindSync, - } - - action.EventRef = eventRef - return action.EventRef -} - -func buildCorrelation(event *Event) *Correlation { - event.Correlation = append(event.Correlation, Correlation{ - ContextAttributeName: "attribute name", - }) - - return &event.Correlation[len(event.Correlation)-1] -} - -func TestEventStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events = append(model.Events, model.Events[0]) - return *model - }, - Err: `workflow.events has duplicate "name"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Name = "" - model.Events[0].Type = "" - model.Events[0].Kind = "" - return *model - }, - Err: `workflow.events[0].name is required -workflow.events[0].type is required -workflow.events[0].kind is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Kind = EventKindConsumed + "invalid" - return *model - }, - Err: `workflow.events[0].kind need by one of [consumed produced]`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestCorrelationStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Events = Events{{ - Name: "event 1", - Type: "event type", - Kind: EventKindConsumed, - }} - - buildCorrelation(&baseWorkflow.Events[0]) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "empty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation = nil - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Events[0].Correlation[0].ContextAttributeName = "" - return *model - }, - Err: `workflow.events[0].correlation[0].contextAttributeName is required`, - }, - //TODO: Add test: correlation only used for `consumed` events - } - - StructLevelValidationCtx(t, testCases) -} - -func TestEventRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := 
buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - eventRef := buildEventRef(baseWorkflow, action1, "event 1", "event 2") - eventRef.ResultEventTimeout = "PT1H" - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef is required -workflow.states[0].actions[0].eventRef.resultEventRef is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.TriggerEventRef = "invalid event" - model.States[0].OperationState.Actions[0].EventRef.ResultEventRef = "invalid event 2" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event" -workflow.states[0].actions[0].eventRef.triggerEventRef don't exist "invalid event 2"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.ResultEventTimeout = "10hs" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.resultEventTimeout invalid iso8601 duration "10hs"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].EventRef.Invoke = InvokeKindSync + "invalid" - return *model - }, - Err: `workflow.states[0].actions[0].eventRef.invoke need by one of [sync async]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/foreach_state.go b/model/foreach_state.go deleted file mode 100644 index aa19f4e..0000000 --- a/model/foreach_state.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) -type ForEachModeType string - -func (f ForEachModeType) KindValues() []string { - return []string{ - string(ForEachModeTypeSequential), - string(ForEachModeTypeParallel), - } -} - -func (f ForEachModeType) String() string { - return string(f) -} - -const ( - // ForEachModeTypeSequential specifies iterations should be done sequentially. - ForEachModeTypeSequential ForEachModeType = "sequential" - // ForEachModeTypeParallel specifies iterations should be done parallel. 
- ForEachModeTypeParallel ForEachModeType = "parallel" -) - -// ForEachState used to execute actions for each element of a data set. -// +builder-gen:new-call=ApplyDefault -type ForEachState struct { - // Workflow expression selecting an array element of the states' data. - // +kubebuilder:validation:Required - InputCollection string `json:"inputCollection" validate:"required"` - // Workflow expression specifying an array element of the states data to add the results of each iteration. - // +optional - OutputCollection string `json:"outputCollection,omitempty"` - // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, - // this param should contain a unique element of the inputCollection array. - // +optional - IterationParam string `json:"iterationParam,omitempty"` - // Specifies how many iterations may run in parallel at the same time. Used if mode property is set to - // parallel (default). If not specified, its value should be the size of the inputCollection. - // +optional - BatchSize *intstr.IntOrString `json:"batchSize,omitempty"` - // Actions to be executed for each of the elements of inputCollection. - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions,omitempty" validate:"required,min=0,dive"` - // State specific timeout. - // +optional - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - // Specifies how iterations are to be performed (sequential or in parallel), defaults to parallel. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=parallel - Mode ForEachModeType `json:"mode,omitempty" validate:"required,oneofkind"` -} - -func (f *ForEachState) MarshalJSON() ([]byte, error) { - type Alias ForEachState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ForEachStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(f), - Timeouts: f.Timeouts, - }) - return custom, err -} - -type forEachStateUnmarshal ForEachState - -// UnmarshalJSON implements json.Unmarshaler -func (f *ForEachState) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("forEachState", data, (*forEachStateUnmarshal)(f)) -} - -// ApplyDefault set the default values for ForEach State -func (f *ForEachState) ApplyDefault() { - f.Mode = ForEachModeTypeParallel -} - -// ForEachStateTimeout defines timeout settings for foreach state -type ForEachStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/foreach_state_test.go b/model/foreach_state_test.go deleted file mode 100644 index a10f7a9..0000000 --- a/model/foreach_state_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestForEachStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ForEachState - err string - } - testCases := []testCase{ - { - desp: "all field", - data: `{"mode": "sequential"}`, - expect: &ForEachState{ - Mode: ForEachModeTypeSequential, - }, - err: ``, - }, - { - desp: "mode unset", - data: `{}`, - expect: &ForEachState{ - Mode: ForEachModeTypeParallel, - }, - err: ``, - }, - { - desp: "invalid json format", - data: `{"mode": 1}`, - expect: nil, - err: `forEachState.mode must be sequential or parallel`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ForEachState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/foreach_state_validator.go b/model/foreach_state_validator.go deleted file mode 100644 index d1d9894..0000000 --- a/model/foreach_state_validator.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(forEachStateStructLevelValidation, ForEachState{}) -} - -// ForEachStateStructLevelValidation custom validator for ForEachState -func forEachStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - stateObj := structLevel.Current().Interface().(ForEachState) - - if stateObj.Mode != ForEachModeTypeParallel { - return - } - - if stateObj.BatchSize == nil { - return - } - - if !val.ValidateGt0IntStr(stateObj.BatchSize) { - structLevel.ReportError(reflect.ValueOf(stateObj.BatchSize), "BatchSize", "batchSize", "gt0", "") - } -} diff --git a/model/foreach_state_validator_test.go b/model/foreach_state_validator_test.go deleted file mode 100644 index 8fb49d0..0000000 --- a/model/foreach_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildForEachState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeForEach, - }, - ForEachState: &ForEachState{ - InputCollection: "3", - Mode: ForEachModeTypeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func TestForEachStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - forEachState := buildForEachState(baseWorkflow, "start state") - buildEndByState(forEachState, true, false) - action1 := buildActionByForEachState(forEachState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - } - return *model - }, - }, - { - Desp: "success without batch size", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = nil - return *model - }, - }, - { - Desp: "gt0 int", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel - model.States[0].ForEachState.BatchSize = &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - } - return *model - }, - Err: `workflow.states[0].forEachState.batchSize must be greater than 0`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Mode = ForEachModeTypeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].forEachState.mode need by one of [sequential parallel]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.InputCollection = "" - model.States[0].ForEachState.Mode = "" - model.States[0].ForEachState.Actions = nil - return *model - }, - Err: `workflow.states[0].forEachState.inputCollection is required -workflow.states[0].forEachState.actions is required -workflow.states[0].forEachState.mode is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ForEachState.Actions = []Action{} - return *model - }, - Err: ``, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestForEachStateTimeoutStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) -} diff --git a/model/function.go b/model/function.go deleted file mode 100644 index 7cf4197..0000000 --- a/model/function.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -const ( - // FunctionTypeREST a combination of the function/service OpenAPI definition document URI and the particular service - // operation that needs to be invoked, separated by a '#'. - FunctionTypeREST FunctionType = "rest" - // FunctionTypeRPC a combination of the gRPC proto document URI and the particular service name and service method - // name that needs to be invoked, separated by a '#'. - FunctionTypeRPC FunctionType = "rpc" - // FunctionTypeExpression defines the expression syntax. - FunctionTypeExpression FunctionType = "expression" - // FunctionTypeGraphQL a combination of the GraphQL schema definition URI and the particular service name and - // service method name that needs to be invoked, separated by a '#' - FunctionTypeGraphQL FunctionType = "graphql" - // FunctionTypeAsyncAPI a combination of the AsyncApi definition document URI and the particular service operation - // that needs to be invoked, separated by a '#' - FunctionTypeAsyncAPI FunctionType = "asyncapi" - // FunctionTypeOData a combination of the GraphQL schema definition URI and the particular service name and service - // method name that needs to be invoked, separated by a '#' - FunctionTypeOData FunctionType = "odata" - // FunctionTypeCustom property defines a list of function types that are set by the specification. Some runtime - // implementations might support additional function types that extend the ones defined in the specification - FunctionTypeCustom FunctionType = "custom" -) - -// FunctionType ... -type FunctionType string - -func (i FunctionType) KindValues() []string { - return []string{ - string(FunctionTypeREST), - string(FunctionTypeRPC), - string(FunctionTypeExpression), - string(FunctionTypeGraphQL), - string(FunctionTypeAsyncAPI), - string(FunctionTypeOData), - string(FunctionTypeCustom), - } -} - -func (i FunctionType) String() string { - return string(i) -} - -// Function ... -// +builder-gen:new-call=ApplyDefault -type Function struct { - Common `json:",inline"` - // Unique function name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // If type is `rest`, #. - // If type is `rpc`, ##. - // If type is `expression`, defines the workflow expression. If the type is `custom`, - // #. - // +kubebuilder:validation:Required - Operation string `json:"operation" validate:"required"` - // Defines the function type. Is either `custom`, `rest`, `rpc`, `expression`, `graphql`, `odata` or `asyncapi`. - // Default is `rest`. - // +kubebuilder:validation:Enum=rest;rpc;expression;graphql;odata;asyncapi;custom - // +kubebuilder:default=rest - Type FunctionType `json:"type,omitempty" validate:"required,oneofkind"` - // References an auth definition name to be used to access to resource defined in the operation parameter. 
- // +optional - AuthRef string `json:"authRef,omitempty"` -} - -type functionUnmarshal Function - -// UnmarshalJSON implements json unmarshaler interface -func (f *Function) UnmarshalJSON(data []byte) error { - f.ApplyDefault() - return util.UnmarshalObject("function", data, (*functionUnmarshal)(f)) -} - -// ApplyDefault set the default values for Function -func (f *Function) ApplyDefault() { - f.Type = FunctionTypeREST -} diff --git a/model/function_validator_test.go b/model/function_validator_test.go deleted file mode 100644 index fcde6b9..0000000 --- a/model/function_validator_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestFunctionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.Functions = Functions{{ - Name: "function 1", - Operation: "http://function/action", - Type: FunctionTypeREST, - }} - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Name = "" - model.Functions[0].Operation = "" - model.Functions[0].Type = "" - return *model - }, - Err: `workflow.functions[0].name is required -workflow.functions[0].operation is required -workflow.functions[0].type is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions = append(model.Functions, model.Functions[0]) - return *model - }, - Err: `workflow.functions has duplicate "name"`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Functions[0].Type = FunctionTypeREST + "invalid" - return *model - }, - Err: `workflow.functions[0].type need by one of [rest rpc expression graphql asyncapi odata custom]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/inject_state.go b/model/inject_state.go deleted file mode 100644 index a195423..0000000 --- a/model/inject_state.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// InjectState used to inject static data into state data input. -type InjectState struct { - // JSON object which can be set as state's data input and can be manipulated via filter - // +kubebuilder:validation:MinProperties=1 - Data map[string]Object `json:"data" validate:"required,min=1"` - // State specific timeouts - // +optional - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` -} - -func (i *InjectState) MarshalJSON() ([]byte, error) { - type Alias InjectState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *InjectStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(i), - Timeouts: i.Timeouts, - }) - return custom, err -} - -// InjectStateTimeout defines timeout settings for inject state -type InjectStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/operation_state.go b/model/operation_state.go deleted file mode 100644 index c530ad8..0000000 --- a/model/operation_state.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// OperationState defines a set of actions to be performed in sequence or in parallel. -// +builder-gen:new-call=ApplyDefault -type OperationState struct { - // Specifies whether actions are performed in sequence or in parallel, defaults to sequential. - // +kubebuilder:validation:Enum=sequential;parallel - // +kubebuilder:default=sequential - ActionMode ActionMode `json:"actionMode,omitempty" validate:"required,oneofkind"` - // Actions to be performed - // +kubebuilder:validation:MinItems=0 - Actions []Action `json:"actions" validate:"min=0,dive"` - // State specific timeouts - // +optional - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` -} - -func (a *OperationState) MarshalJSON() ([]byte, error) { - type Alias OperationState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *OperationStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(a), - Timeouts: a.Timeouts, - }) - return custom, err -} - -type operationStateUnmarshal OperationState - -// UnmarshalJSON unmarshal OperationState object from json bytes -func (o *OperationState) UnmarshalJSON(data []byte) error { - o.ApplyDefault() - return util.UnmarshalObject("operationState", data, (*operationStateUnmarshal)(o)) -} - -// ApplyDefault set the default values for Operation State -func (o *OperationState) ApplyDefault() { - o.ActionMode = ActionModeSequential -} - -// OperationStateTimeout defines the specific timeout settings for operation state -type OperationStateTimeout struct { - // Defines workflow state execution timeout. 
- // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single actions definition execution timeout (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/operation_state_test.go b/model/operation_state_test.go deleted file mode 100644 index 4939797..0000000 --- a/model/operation_state_test.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestOperationStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect OperationState - err string - } - testCases := []testCase{ - { - desp: "all fields set", - data: `{"actionMode": "parallel"}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: ``, - }, - { - desp: "actionMode unset", - data: `{}`, - expect: OperationState{ - ActionMode: ActionModeSequential, - }, - err: ``, - }, - { - desp: "invalid object format", - data: `{"actionMode": parallel}`, - expect: OperationState{ - ActionMode: ActionModeParallel, - }, - err: `invalid character 'p' looking for beginning of value`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - v := OperationState{} - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/operation_state_validator_test.go b/model/operation_state_validator_test.go deleted file mode 100644 index 5da6dba..0000000 --- a/model/operation_state_validator_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "testing" -) - -func buildOperationState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: ActionModeSequential, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildOperationStateTimeout(state *State) *OperationStateTimeout { - state.OperationState.Timeouts = &OperationStateTimeout{ - ActionExecTimeout: "PT5S", - } - return state.OperationState.Timeouts -} - -func TestOperationStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions = []Action{} - return *model - }, - Err: ``, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.ActionMode = ActionModeParallel + "invalid" - return *model - }, - Err: `workflow.states[0].actionMode need by one of [sequential parallel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOperationStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - operationStateTimeout := buildOperationStateTimeout(operationState) - buildStateExecTimeoutByOperationStateTimeout(operationStateTimeout) - - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Timeouts.ActionExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/parallel_state.go b/model/parallel_state.go deleted file mode 100644 index f65b7a1..0000000 --- a/model/parallel_state.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// CompletionType define on how to complete branch execution. -type CompletionType string - -func (i CompletionType) KindValues() []string { - return []string{ - string(CompletionTypeAllOf), - string(CompletionTypeAtLeast), - } -} - -func (i CompletionType) String() string { - return string(i) -} - -const ( - // CompletionTypeAllOf defines all branches must complete execution before the state can transition/end. - CompletionTypeAllOf CompletionType = "allOf" - // CompletionTypeAtLeast defines state can transition/end once at least the specified number of branches - // have completed execution. - CompletionTypeAtLeast CompletionType = "atLeast" -) - -// ParallelState Consists of a number of states that are executed in parallel -// +builder-gen:new-call=ApplyDefault -type ParallelState struct { - // List of branches for this parallel state. - // +kubebuilder:validation:MinItems=1 - Branches []Branch `json:"branches" validate:"required,min=1,dive"` - // Option types on how to complete branch execution. Defaults to `allOf`. - // +kubebuilder:validation:Enum=allOf;atLeast - // +kubebuilder:default=allOf - CompletionType CompletionType `json:"completionType,omitempty" validate:"required,oneofkind"` - // Used when branchCompletionType is set to atLeast to specify the least number of branches that must complete - // in order for the state to transition/end. - // +optional - // TODO: change this field to unmarshal result as int - NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` - // State specific timeouts - // +optional - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` -} - -func (p *ParallelState) MarshalJSON() ([]byte, error) { - type Alias ParallelState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *ParallelStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(p), - Timeouts: p.Timeouts, - }) - return custom, err -} - -type parallelStateUnmarshal ParallelState - -// UnmarshalJSON unmarshal ParallelState object from json bytes -func (ps *ParallelState) UnmarshalJSON(data []byte) error { - ps.ApplyDefault() - return util.UnmarshalObject("parallelState", data, (*parallelStateUnmarshal)(ps)) -} - -// ApplyDefault set the default values for Parallel State -func (ps *ParallelState) ApplyDefault() { - ps.CompletionType = CompletionTypeAllOf -} - -// Branch Definition -type Branch struct { - // Branch name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Actions to be executed in this branch - // +kubebuilder:validation:MinItems=1 - Actions []Action `json:"actions" validate:"required,min=1,dive"` - // Branch specific timeout settings - // +optional - Timeouts *BranchTimeouts `json:"timeouts,omitempty"` -} - -// BranchTimeouts defines the specific timeout settings for branch -type BranchTimeouts struct { - // Single actions definition execution timeout duration (ISO 8601 duration format) - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,iso8601duration"` - // Single branch execution timeout duration (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// ParallelStateTimeout defines the specific timeout settings for parallel state -type ParallelStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 
duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Default single branch execution timeout (ISO 8601 duration format) - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,iso8601duration"` -} diff --git a/model/parallel_state_test.go b/model/parallel_state_test.go deleted file mode 100644 index b95cc69..0000000 --- a/model/parallel_state_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestParallelStateUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect *ParallelState - err string - } - testCases := []testCase{ - { - desp: "all field set", - data: `{"completionType": "allOf", "numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - { - desp: "all optional field not set", - data: `{"numCompleted": 1}`, - expect: &ParallelState{ - CompletionType: CompletionTypeAllOf, - NumCompleted: intstr.FromInt(1), - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ParallelState - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, &v) - }) - } -} diff --git a/model/parallel_state_validator.go b/model/parallel_state_validator.go deleted file mode 100644 index 5999071..0000000 --- a/model/parallel_state_validator.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "context" - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(parallelStateStructLevelValidation, ParallelState{}) -} - -// ParallelStateStructLevelValidation custom validator for ParallelState -func parallelStateStructLevelValidation(_ context.Context, structLevel validator.StructLevel) { - parallelStateObj := structLevel.Current().Interface().(ParallelState) - - if parallelStateObj.CompletionType == CompletionTypeAtLeast { - if !val.ValidateGt0IntStr(¶llelStateObj.NumCompleted) { - structLevel.ReportError(reflect.ValueOf(parallelStateObj.NumCompleted), "NumCompleted", "NumCompleted", "gt0", "") - } - } -} diff --git a/model/parallel_state_validator_test.go b/model/parallel_state_validator_test.go deleted file mode 100644 index d1acea9..0000000 --- a/model/parallel_state_validator_test.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "k8s.io/apimachinery/pkg/util/intstr" -) - -func buildParallelState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeParallel, - }, - ParallelState: &ParallelState{ - CompletionType: CompletionTypeAllOf, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildBranch(state *State, name string) *Branch { - branch := Branch{ - Name: name, - } - - state.ParallelState.Branches = append(state.ParallelState.Branches, branch) - return &state.ParallelState.Branches[len(state.ParallelState.Branches)-1] -} - -func buildBranchTimeouts(branch *Branch) *BranchTimeouts { - branch.Timeouts = &BranchTimeouts{} - return branch.Timeouts -} - -func buildParallelStateTimeout(state *State) *ParallelStateTimeout { - state.ParallelState.Timeouts = &ParallelStateTimeout{ - BranchExecTimeout: "PT5S", - } - return state.ParallelState.Timeouts -} - -func TestParallelStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success completionTypeAllOf", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "success completionTypeAtLeast", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - model.States[0].ParallelState.NumCompleted = intstr.FromInt(1) - return *model - }, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - 
model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast + " invalid" - return *model - }, - Err: `workflow.states[0].parallelState.completionType need by one of [allOf atLeast]`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = nil - model.States[0].ParallelState.CompletionType = "" - return *model - }, - Err: `workflow.states[0].parallelState.branches is required -workflow.states[0].parallelState.completionType is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches = []Branch{} - return *model - }, - Err: `workflow.states[0].parallelState.branches must have the minimum 1`, - }, - { - Desp: "required numCompleted", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.CompletionType = CompletionTypeAtLeast - return *model - }, - Err: `workflow.states[0].parallelState.numCompleted must be greater than 0`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Name = "" - model.States[0].ParallelState.Branches[0].Actions = nil - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].name is required -workflow.states[0].parallelState.branches[0].actions is required`, - }, - { - Desp: "min", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Actions = []Action{} - return *model - }, - Err: `workflow.states[0].parallelState.branches[0].actions must have the minimum 1`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestBranchTimeoutsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - buildBranchTimeouts(branch) - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "PT5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "PT5S" - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Branches[0].Timeouts.ActionExecTimeout = "P5S" - model.States[0].ParallelState.Branches[0].Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: 
`workflow.states[0].parallelState.branches[0].timeouts.actionExecTimeout invalid iso8601 duration "P5S" -workflow.states[0].parallelState.branches[0].timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - StructLevelValidationCtx(t, testCases) -} - -func TestParallelStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - parallelState := buildParallelState(baseWorkflow, "start state") - buildParallelStateTimeout(parallelState) - buildEndByState(parallelState, true, false) - branch := buildBranch(parallelState, "brach 1") - action1 := buildActionByBranch(branch, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "" - return *model - }, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].ParallelState.Timeouts.BranchExecTimeout = "P5S" - return *model - }, - Err: `workflow.states[0].parallelState.timeouts.branchExecTimeout invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/retry.go b/model/retry.go deleted file mode 100644 index 83e2333..0000000 --- a/model/retry.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/util" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -// Retry ... -// +builder-gen:new-call=ApplyDefault -type Retry struct { - // Unique retry strategy name - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Time delay between retry attempts (ISO 8601 duration format) - Delay string `json:"delay,omitempty" validate:"omitempty,iso8601duration"` - // Maximum time delay between retry attempts (ISO 8601 duration format) - MaxDelay string `json:"maxDelay,omitempty" validate:"omitempty,iso8601duration"` - // Static value by which the delay increases during each attempt (ISO 8601 time format) - Increment string `json:"increment,omitempty" validate:"omitempty,iso8601duration"` - // Numeric value, if specified the delay between retries is multiplied by this value. - // +optional - Multiplier *floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=1"` - // Maximum number of retry attempts. - // +kubebuilder:validation:Required - MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` - // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). 
If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) - // TODO: make iso8601duration compatible this type - Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` -} - -type retryUnmarshal Retry - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retry) UnmarshalJSON(data []byte) error { - r.ApplyDefault() - return util.UnmarshalObject("retry", data, (*retryUnmarshal)(r)) -} - -func (r *Retry) ApplyDefault() { - r.MaxAttempts = intstr.FromInt(1) -} diff --git a/model/retry_test.go b/model/retry_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/retry_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/retry_validator.go b/model/retry_validator.go deleted file mode 100644 index b95e2f7..0000000 --- a/model/retry_validator.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidation(retryStructLevelValidation, Retry{}) -} - -// RetryStructLevelValidation custom validator for Retry Struct -func retryStructLevelValidation(structLevel validator.StructLevel) { - retryObj := structLevel.Current().Interface().(Retry) - - if retryObj.Jitter.Type == floatstr.String && retryObj.Jitter.StrVal != "" { - err := val.ValidateISO8601TimeDuration(retryObj.Jitter.StrVal) - if err != nil { - structLevel.ReportError(reflect.ValueOf(retryObj.Jitter.StrVal), "Jitter", "jitter", "iso8601duration", "") - } - } -} diff --git a/model/retry_validator_test.go b/model/retry_validator_test.go deleted file mode 100644 index 5a3bca0..0000000 --- a/model/retry_validator_test.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" -) - -func TestRetryStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildRetryRef(baseWorkflow, action1, "retry 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "PT5S" - model.Retries[0].MaxDelay = "PT5S" - model.Retries[0].Increment = "PT5S" - model.Retries[0].Jitter = floatstr.FromString("PT5S") - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Name = "" - model.States[0].OperationState.Actions[0].RetryRef = "" - return *model - }, - Err: `workflow.retries[0].name is required`, - }, - { - Desp: "repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries = append(model.Retries, model.Retries[0]) - return *model - }, - Err: `workflow.retries has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].RetryRef = "invalid retry" - return *model - }, - Err: `workflow.states[0].actions[0].retryRef don't exist "invalid retry"`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Retries[0].Delay = "P5S" - model.Retries[0].MaxDelay = "P5S" - model.Retries[0].Increment = "P5S" - model.Retries[0].Jitter = floatstr.FromString("P5S") - - return *model - }, - Err: `workflow.retries[0].delay invalid iso8601 duration "P5S" -workflow.retries[0].maxDelay invalid iso8601 duration "P5S" -workflow.retries[0].increment invalid iso8601 duration "P5S" -workflow.retries[0].jitter invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/sleep_state.go b/model/sleep_state.go deleted file mode 100644 index 5d144c5..0000000 --- a/model/sleep_state.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" -) - -// SleepState suspends workflow execution for a given time duration. 
-type SleepState struct { - // Duration (ISO 8601 duration format) to sleep - // +kubebuilder:validation:Required - Duration string `json:"duration" validate:"required,iso8601duration"` - // Timeouts State specific timeouts - // +optional - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SleepState) MarshalJSON() ([]byte, error) { - type Alias SleepState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SleepStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - return custom, err -} - -// SleepStateTimeout defines timeout settings for sleep state -type SleepStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` -} diff --git a/model/sleep_state_test.go b/model/sleep_state_test.go deleted file mode 100644 index c960f3c..0000000 --- a/model/sleep_state_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model diff --git a/model/sleep_state_validator_test.go b/model/sleep_state_validator_test.go deleted file mode 100644 index 057d6b3..0000000 --- a/model/sleep_state_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
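
For reference, a minimal sketch of the JSON shape the SleepState marshaller above produces; the model import path is assumed from the v2 package references elsewhere in this patch, and the printed output is indicative only.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Duration carries the required iso8601duration-tagged value; Timeouts is optional.
	sleep := model.SleepState{
		Duration: "PT30S",
		Timeouts: &model.SleepStateTimeout{
			StateExecTimeout: &model.StateExecTimeout{Total: "PT1M"},
		},
	}
	out, err := json.Marshal(&sleep)
	if err != nil {
		panic(err)
	}
	// Roughly: {"duration":"PT30S","timeouts":{"stateExecTimeout":{"total":"PT1M"}}}
	fmt.Println(string(out))
}
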
- -package model - -import "testing" - -func buildSleepState(workflow *Workflow, name, duration string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSleep, - }, - SleepState: &SleepState{ - Duration: duration, - }, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildSleepStateTimeout(state *State) *SleepStateTimeout { - state.SleepState.Timeouts = &SleepStateTimeout{} - return state.SleepState.Timeouts -} - -func TestSleepStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "" - return *model - }, - Err: `workflow.states[0].sleepState.duration is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SleepState.Duration = "P5S" - return *model - }, - Err: `workflow.states[0].sleepState.duration invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSleepStateTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - sleepState := buildSleepState(baseWorkflow, "start state", "PT5S") - buildEndByState(sleepState, true, false) - sleepStateTimeout := buildSleepStateTimeout(sleepState) - buildStateExecTimeoutBySleepStateTimeout(sleepStateTimeout) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/state_exec_timeout.go b/model/state_exec_timeout.go deleted file mode 100644 index 0a53fd8..0000000 --- a/model/state_exec_timeout.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
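
The "P5S" values rejected in the test cases above are missing the ISO 8601 time designator. A short sketch using the v2 validator helper referenced in retry_validator.go earlier in this patch (same signature, string in, error out):

package main

import (
	"fmt"

	val "github.com/serverlessworkflow/sdk-go/v2/validator"
)

func main() {
	// "PT5S" is five seconds; "P5S" is invalid because second values require the "T" designator.
	for _, d := range []string{"PT5S", "P5S"} {
		if err := val.ValidateISO8601TimeDuration(d); err != nil {
			fmt.Printf("%s: %v\n", d, err)
			continue
		}
		fmt.Printf("%s: valid\n", d)
	}
}
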
- -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// StateExecTimeout defines workflow state execution timeout -type StateExecTimeout struct { - // Single state execution timeout, not including retries (ISO 8601 duration format) - // +optional - Single string `json:"single,omitempty" validate:"omitempty,iso8601duration"` - // Total state execution timeout, including retries (ISO 8601 duration format) - // +kubebuilder:validation:Required - Total string `json:"total" validate:"required,iso8601duration"` -} - -type stateExecTimeoutUnmarshal StateExecTimeout - -// UnmarshalJSON unmarshal StateExecTimeout object from json bytes -func (s *StateExecTimeout) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("stateExecTimeout", data, &s.Total, (*stateExecTimeoutUnmarshal)(s)) -} diff --git a/model/state_exec_timeout_test.go b/model/state_exec_timeout_test.go deleted file mode 100644 index 6030395..0000000 --- a/model/state_exec_timeout_test.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestStateExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - - expect *StateExecTimeout - err string - } - testCases := []testCase{ - { - desp: "normal string", - data: `"PT10S"`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total", - data: `{ - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "normal object with total & single", - data: `{ - "single": "PT1S", - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "PT1S", - Total: "PT10S", - }, - err: ``, - }, - { - desp: "invalid string or object", - data: `PT10S`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout has a syntax error "invalid character 'P' looking for beginning of value"`, - }, - { - desp: "invalid total type", - data: `{ - "single": "PT1S", - "total": 10 - }`, - - expect: &StateExecTimeout{}, - err: `stateExecTimeout.total must be string`, - }, - { - desp: "invalid single type", - data: `{ - "single": 1, - "total": "PT10S" - }`, - - expect: &StateExecTimeout{ - Single: "", - Total: "PT10S", - }, - err: `stateExecTimeout.single must be string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - actual := &StateExecTimeout{} - err := actual.UnmarshalJSON([]byte(tc.data)) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, actual) - }) - } -} diff --git a/model/state_exec_timeout_validator_test.go b/model/state_exec_timeout_validator_test.go deleted file mode 100644 index 5a2f794..0000000 --- a/model/state_exec_timeout_validator_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// 
Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func buildStateExecTimeoutByTimeouts(timeouts *Timeouts) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutBySleepStateTimeout(timeouts *SleepStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - } - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func buildStateExecTimeoutByOperationStateTimeout(timeouts *OperationStateTimeout) *StateExecTimeout { - stateExecTimeout := StateExecTimeout{ - Total: "PT5S", - Single: "PT5S", - } - timeouts.ActionExecTimeout = "PT5S" - timeouts.StateExecTimeout = &stateExecTimeout - return timeouts.StateExecTimeout -} - -func TestStateExecTimeoutStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - timeouts := buildTimeouts(baseWorkflow) - buildStateExecTimeoutByTimeouts(timeouts) - - callbackState := buildCallbackState(baseWorkflow, "start state", "event 1") - buildEndByState(callbackState, true, false) - buildCallbackStateTimeout(callbackState.CallbackState) - buildFunctionRef(baseWorkflow, &callbackState.Action, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "omitempty", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "" - return *model - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.total is required`, - }, - { - Desp: "iso8601duration", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.Timeouts.StateExecTimeout.Single = "P5S" - model.BaseWorkflow.Timeouts.StateExecTimeout.Total = "P5S" - return *model - }, - Err: `workflow.timeouts.stateExecTimeout.single invalid iso8601 duration "P5S" -workflow.timeouts.stateExecTimeout.total invalid iso8601 duration "P5S"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/states.go b/model/states.go deleted file mode 100644 index fa834f7..0000000 --- a/model/states.go +++ /dev/null @@ -1,281 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// StateType ... -type StateType string - -func (s StateType) KindValues() []string { - return []string{ - string(StateTypeDelay), - string(StateTypeEvent), - string(StateTypeOperation), - string(StateTypeParallel), - string(StateTypeSwitch), - string(StateTypeForEach), - string(StateTypeInject), - string(StateTypeCallback), - string(StateTypeSleep), - } -} - -func (s StateType) String() string { - return string(s) -} - -const ( - // StateTypeDelay ... - StateTypeDelay StateType = "delay" - // StateTypeEvent ... - StateTypeEvent StateType = "event" - // StateTypeOperation ... - StateTypeOperation StateType = "operation" - // StateTypeParallel ... - StateTypeParallel StateType = "parallel" - // StateTypeSwitch ... - StateTypeSwitch StateType = "switch" - // StateTypeForEach ... - StateTypeForEach StateType = "foreach" - // StateTypeInject ... - StateTypeInject StateType = "inject" - // StateTypeCallback ... - StateTypeCallback StateType = "callback" - // StateTypeSleep ... - StateTypeSleep StateType = "sleep" -) - -// BaseState ... -type BaseState struct { - // Unique State id. - // +optional - ID string `json:"id,omitempty"` - // State name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // stateType can be any of delay, callback, event, foreach, inject, operation, parallel, sleep, switch - // +kubebuilder:validation:Enum:=delay;callback;event;foreach;inject;operation;parallel;sleep;switch - // +kubebuilder:validation:Required - Type StateType `json:"type" validate:"required,oneofkind"` - // States error handling and retries definitions. - // +optional - OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` - // Next transition of the workflow after the time delay. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // State data filter. - // +optional - StateDataFilter *StateDataFilter `json:"stateDataFilter,omitempty"` - // Unique Name of a workflow state which is responsible for compensation of this state. - // +optional - CompensatedBy string `json:"compensatedBy,omitempty"` - // If true, this state is used to compensate another state. Default is false. - // +optional - UsedForCompensation bool `json:"usedForCompensation,omitempty"` - // State end definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` - // Metadata information. - // +optional - Metadata *Metadata `json:"metadata,omitempty"` -} - -func (b *BaseState) MarshalJSON() ([]byte, error) { - type Alias BaseState - if b == nil { - return nil, nil - } - cus, err := json.Marshal(struct { - *Alias - }{ - Alias: (*Alias)(b), - }) - return cus, err -} - -// +builder-gen:embedded-ignore-method=BaseState -type State struct { - BaseState `json:",inline"` - // delayState Causes the workflow execution to delay for a specified duration. 
- // +optional - *DelayState `json:"delayState,omitempty"` - // event states await one or more events and perform actions when they are received. If defined as the - // workflow starting state, the event state definition controls when the workflow instances should be created. - // +optional - *EventState `json:"eventState,omitempty"` - // operationState defines a set of actions to be performed in sequence or in parallel. - // +optional - *OperationState `json:"operationState,omitempty"` - // parallelState Consists of a number of states that are executed in parallel. - // +optional - *ParallelState `json:"parallelState,omitempty"` - // switchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. - // +optional - *SwitchState `json:"switchState,omitempty"` - // forEachState used to execute actions for each element of a data set. - // +optional - *ForEachState `json:"forEachState,omitempty"` - // injectState used to inject static data into state data input. - // +optional - *InjectState `json:"injectState,omitempty"` - // callbackState executes a function and waits for callback event that indicates completion of the task. - // +optional - *CallbackState `json:"callbackState,omitempty"` - // sleepState suspends workflow execution for a given time duration. - // +optional - *SleepState `json:"sleepState,omitempty"` -} - -func (s *State) MarshalJSON() ([]byte, error) { - if s == nil { - return nil, nil - } - r := []byte("") - var errs error - - if s.DelayState != nil { - r, errs = s.DelayState.MarshalJSON() - } - - if s.EventState != nil { - r, errs = s.EventState.MarshalJSON() - } - - if s.OperationState != nil { - r, errs = s.OperationState.MarshalJSON() - } - - if s.ParallelState != nil { - r, errs = s.ParallelState.MarshalJSON() - } - - if s.SwitchState != nil { - r, errs = s.SwitchState.MarshalJSON() - } - - if s.ForEachState != nil { - r, errs = s.ForEachState.MarshalJSON() - } - - if s.InjectState != nil { - r, errs = s.InjectState.MarshalJSON() - } - - if s.CallbackState != nil { - r, errs = s.CallbackState.MarshalJSON() - } - - if s.SleepState != nil { - r, errs = s.SleepState.MarshalJSON() - } - - b, err := s.BaseState.MarshalJSON() - if err != nil { - return nil, err - } - - //remove }{ as BaseState and the State Type needs to be merged together - partialResult := append(b, r...) 
- result := strings.Replace(string(partialResult), "}{", ",", 1) - return []byte(result), errs -} - -type unmarshalState State - -// UnmarshalJSON implements json.Unmarshaler -func (s *State) UnmarshalJSON(data []byte) error { - if err := util.UnmarshalObject("state", data, (*unmarshalState)(s)); err != nil { - return err - } - - switch s.Type { - case StateTypeDelay: - state := &DelayState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.DelayState = state - - case StateTypeEvent: - state := &EventState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.EventState = state - - case StateTypeOperation: - state := &OperationState{} - if err := util.UnmarshalObject("states", data, state); err != nil { - return err - } - s.OperationState = state - - case StateTypeParallel: - state := &ParallelState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ParallelState = state - - case StateTypeSwitch: - state := &SwitchState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SwitchState = state - - case StateTypeForEach: - state := &ForEachState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.ForEachState = state - - case StateTypeInject: - state := &InjectState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.InjectState = state - - case StateTypeCallback: - state := &CallbackState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.CallbackState = state - - case StateTypeSleep: - state := &SleepState{} - if err := json.Unmarshal(data, state); err != nil { - return err - } - s.SleepState = state - default: - return fmt.Errorf("states type %q not supported", s.Type.String()) - } - return nil -} diff --git a/model/states_validator.go b/model/states_validator.go deleted file mode 100644 index 1bb58e5..0000000 --- a/model/states_validator.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
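
For reference, a minimal sketch of the type-discriminated decoding implemented by State.UnmarshalJSON above: the "type" field selects which concrete state struct receives the remaining inline fields. The v2 model import path is assumed.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// BaseState fields ("name", "type") sit inline next to the sleep-specific "duration".
	data := []byte(`{"name": "rest", "type": "sleep", "duration": "PT10S"}`)

	var s model.State
	if err := json.Unmarshal(data, &s); err != nil {
		panic(err)
	}
	// The switch on s.Type routed the payload into the embedded *SleepState.
	fmt.Println(s.Name, s.Type, s.SleepState.Duration) // rest sleep PT10S
}
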
- -package model - -import ( - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(baseStateStructLevelValidationCtx), BaseState{}) -} - -func baseStateStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - baseState := structLevel.Current().Interface().(BaseState) - if baseState.Type != StateTypeSwitch && !baseState.UsedForCompensation { - validTransitionAndEnd(structLevel, baseState, baseState.Transition, baseState.End) - } - - if baseState.CompensatedBy != "" { - if baseState.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagRecursiveCompensation, "") - } - - if ctx.ExistState(baseState.CompensatedBy) { - value := ctx.States[baseState.CompensatedBy].BaseState - if value.UsedForCompensation && value.Type == StateTypeEvent { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedbyEventState, "") - - } else if !value.UsedForCompensation { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagCompensatedby, "") - } - - } else { - structLevel.ReportError(baseState.CompensatedBy, "CompensatedBy", "compensatedBy", val.TagExists, "") - } - } -} diff --git a/model/states_validator_test.go b/model/states_validator_test.go deleted file mode 100644 index 8766d87..0000000 --- a/model/states_validator_test.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
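
To make the compensation rules above concrete, a sketch of a state pair that satisfies them: compensatedBy names a state that exists, is flagged usedForCompensation, and is not an event state. Types and fields are taken from the definitions in this patch; actions and end/transition details are elided.

package main

import "github.com/serverlessworkflow/sdk-go/v2/model"

func main() {
	book := model.State{
		BaseState: model.BaseState{
			Name:          "book-flight",
			Type:          model.StateTypeOperation,
			CompensatedBy: "cancel-flight", // must reference an existing compensation state
		},
		OperationState: &model.OperationState{}, // actions omitted for brevity
	}
	cancel := model.State{
		BaseState: model.BaseState{
			Name:                "cancel-flight",
			Type:                model.StateTypeOperation,
			UsedForCompensation: true, // required for compensation targets; event states are rejected
		},
		OperationState: &model.OperationState{}, // actions omitted for brevity
	}
	_ = []model.State{book, cancel}
}
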
- -package model - -import ( - "testing" -) - -func TestBaseStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 3) - - operationState := buildOperationState(baseWorkflow, "start state 1") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "state 2") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - eventState := buildEventState(baseWorkflow, "state 3") - buildOnEvents(baseWorkflow, eventState, "event 1") - buildEndByState(eventState, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - return *model - }, - }, - { - Desp: "repeat name", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States = []State{model.States[0], model.States[0]} - return *model - }, - Err: `workflow.states has duplicate "name"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = "invalid state compensate by" - return *model - }, - Err: `workflow.states[0].compensatedBy don't exist "invalid state compensate by"`, - }, - { - Desp: "tagcompensatedby", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.CompensatedBy = model.States[1].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 2" is not defined as usedForCompensation`, - }, - { - Desp: "compensatedbyeventstate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[2].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[2].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "state 3" is defined as usedForCompensation and cannot be an event state`, - }, - { - Desp: "recursivecompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.UsedForCompensation = true - model.States[0].BaseState.CompensatedBy = model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].compensatedBy = "start state 1" is defined as usedForCompensation (cannot themselves set their compensatedBy)`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - action2 := buildActionByOperationState(operationState2, "action 2") - buildFunctionRef(baseWorkflow, action2, "function 2") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End = nil - return *model - }, - Err: `workflow.states[0].transition is required`, - }, - { - Desp: "exclusive", - Model: 
func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByState(&model.States[0], &model.States[1], false) - - return *model - }, - Err: `workflow.states[0].transition exclusive`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Type = StateTypeOperation + "invalid" - return *model - }, - Err: `workflow.states[0].type need by one of [delay event operation parallel switch foreach inject callback sleep]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/switch_state.go b/model/switch_state.go deleted file mode 100644 index 15d1a6d..0000000 --- a/model/switch_state.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "strings" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -type EventConditions []EventCondition - -// SwitchState is workflow's gateways: direct transitions onf a workflow based on certain conditions. -type SwitchState struct { - // TODO: don't use BaseState for this, there are a few fields that SwitchState don't need. - - // Default transition of the workflow if there is no matching data conditions. Can include a transition or - // end definition. - DefaultCondition DefaultCondition `json:"defaultCondition"` - // Defines conditions evaluated against events. - // +optional - EventConditions EventConditions `json:"eventConditions" validate:"dive"` - // Defines conditions evaluated against data - // +optional - DataConditions []DataCondition `json:"dataConditions" validate:"dive"` - // SwitchState specific timeouts - // +optional - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` -} - -func (s *SwitchState) MarshalJSON() ([]byte, error) { - type Alias SwitchState - custom, err := json.Marshal(&struct { - *Alias - Timeouts *SwitchStateTimeout `json:"timeouts,omitempty"` - }{ - Alias: (*Alias)(s), - Timeouts: s.Timeouts, - }) - - // Avoid marshal empty objects as null. - st := strings.Replace(string(custom), "\"eventConditions\":null,", "", 1) - st = strings.Replace(st, "\"dataConditions\":null,", "", 1) - st = strings.Replace(st, "\"end\":null,", "", -1) - return []byte(st), err -} - -// DefaultCondition Can be either a transition or end definition -type DefaultCondition struct { - // Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). - // Each state can define a transition definition that is used to determine which state to transition to next. 
- // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // If this state an end state - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` -} - -type defaultConditionUnmarshal DefaultCondition - -// UnmarshalJSON implements json.Unmarshaler -func (e *DefaultCondition) UnmarshalJSON(data []byte) error { - var nextState string - err := util.UnmarshalPrimitiveOrObject("defaultCondition", data, &nextState, (*defaultConditionUnmarshal)(e)) - if err != nil { - return err - } - - if nextState != "" { - e.Transition = &Transition{NextState: nextState} - } - - return err -} - -// SwitchStateTimeout defines the specific timeout settings for switch state -type SwitchStateTimeout struct { - // Default workflow state execution timeout (ISO 8601 duration format) - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // Specify the expire value to transitions to defaultCondition. When event-based conditions do not arrive. - // NOTE: this is only available for EventConditions - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,iso8601duration"` -} - -// EventCondition specify events which the switch state must wait for. -type EventCondition struct { - // Event condition name. - // +optional - Name string `json:"name,omitempty"` - // References a unique event name in the defined workflow events. - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // Event data filter definition. - // +optional - EventDataFilter *EventDataFilter `json:"eventDataFilter,omitempty"` - // Metadata information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. - // Explicit transition to end - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition" validate:"omitempty"` -} - -// DataCondition specify a data-based condition statement which causes a transition to another workflow state -// if evaluated to true. -type DataCondition struct { - // Data condition name. - // +optional - Name string `json:"name,omitempty"` - // Workflow expression evaluated against state data. Must evaluate to true or false. - // +kubebuilder:validation:Required - Condition string `json:"condition" validate:"required"` - // Metadata information. - // +optional - Metadata Metadata `json:"metadata,omitempty"` - // TODO End or Transition needs to be exclusive tag, one or another should be set. 
- // Explicit transition to end - End *End `json:"end" validate:"omitempty"` - // Workflow transition if condition is evaluated to true - Transition *Transition `json:"transition,omitempty" validate:"omitempty"` -} diff --git a/model/switch_state_test.go b/model/switch_state_test.go deleted file mode 100644 index e2f5c51..0000000 --- a/model/switch_state_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDefaultConditionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect DefaultCondition - err string - } - - testCases := []testCase{ - { - desp: "json nextState success", - data: `{"transition": {"nextState": "next state"}}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json nextState", - data: `{"transition": {"nextState": "next state}}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json nextState type", - data: `{"transition": {"nextState": true}}`, - err: `transition.nextState must be string`, - }, - { - desp: "transition json success", - data: `{"transition": "next state"}`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid json transition", - data: `{"transition": "next state}`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json transition type", - data: `{"transition": true}`, - err: `transition must be string or object`, - }, - { - desp: "string success", - data: `"next state"`, - expect: DefaultCondition{ - Transition: &Transition{ - NextState: "next state", - }, - }, - err: ``, - }, - { - desp: "invalid string syntax", - data: `"next state`, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid type", - data: `123`, - err: `defaultCondition must be string or object`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DefaultCondition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/switch_state_validator.go b/model/switch_state_validator.go deleted file mode 100644 index 5738104..0000000 --- a/model/switch_state_validator.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "reflect" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func init() { - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(switchStateStructLevelValidation), SwitchState{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(defaultConditionStructLevelValidation), DefaultCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(eventConditionStructLevelValidationCtx), EventCondition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(dataConditionStructLevelValidation), DataCondition{}) -} - -// SwitchStateStructLevelValidation custom validator for SwitchState -func switchStateStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - switchState := structLevel.Current().Interface().(SwitchState) - - switch { - case len(switchState.DataConditions) == 0 && len(switchState.EventConditions) == 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagRequired, "") - case len(switchState.DataConditions) > 0 && len(switchState.EventConditions) > 0: - structLevel.ReportError(reflect.ValueOf(switchState), "DataConditions", "dataConditions", val.TagExclusive, "") - } -} - -// DefaultConditionStructLevelValidation custom validator for DefaultCondition -func defaultConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - defaultCondition := structLevel.Current().Interface().(DefaultCondition) - validTransitionAndEnd(structLevel, defaultCondition, defaultCondition.Transition, defaultCondition.End) -} - -// EventConditionStructLevelValidation custom validator for EventCondition -func eventConditionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - eventCondition := structLevel.Current().Interface().(EventCondition) - validTransitionAndEnd(structLevel, eventCondition, eventCondition.Transition, eventCondition.End) - - if eventCondition.EventRef != "" && !ctx.ExistEvent(eventCondition.EventRef) { - structLevel.ReportError(eventCondition.EventRef, "eventRef", "EventRef", val.TagExists, "") - } -} - -// DataConditionStructLevelValidation custom validator for DataCondition -func dataConditionStructLevelValidation(ctx ValidatorContext, structLevel validator.StructLevel) { - dataCondition := structLevel.Current().Interface().(DataCondition) - validTransitionAndEnd(structLevel, dataCondition, dataCondition.Transition, dataCondition.End) -} diff --git a/model/switch_state_validator_test.go b/model/switch_state_validator_test.go deleted file mode 100644 index 9c40462..0000000 --- a/model/switch_state_validator_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" -) - -func buildSwitchState(workflow *Workflow, name string) *State { - state := State{ - BaseState: BaseState{ - Name: name, - Type: StateTypeSwitch, - }, - SwitchState: &SwitchState{}, - } - - workflow.States = append(workflow.States, state) - return &workflow.States[len(workflow.States)-1] -} - -func buildDefaultCondition(state *State) *DefaultCondition { - state.SwitchState.DefaultCondition = DefaultCondition{} - return &state.SwitchState.DefaultCondition -} - -func buildDataCondition(state *State, name, condition string) *DataCondition { - if state.SwitchState.DataConditions == nil { - state.SwitchState.DataConditions = []DataCondition{} - } - - dataCondition := DataCondition{ - Name: name, - Condition: condition, - } - - state.SwitchState.DataConditions = append(state.SwitchState.DataConditions, dataCondition) - return &state.SwitchState.DataConditions[len(state.SwitchState.DataConditions)-1] -} - -func buildEventCondition(workflow *Workflow, state *State, name, eventRef string) (*Event, *EventCondition) { - workflow.Events = append(workflow.Events, Event{ - Name: eventRef, - Type: "event type", - Kind: EventKindConsumed, - }) - - eventCondition := EventCondition{ - Name: name, - EventRef: eventRef, - } - - state.SwitchState.EventConditions = append(state.SwitchState.EventConditions, eventCondition) - return &workflow.Events[len(workflow.Events)-1], &state.SwitchState.EventConditions[len(state.SwitchState.EventConditions)-1] -} - -func TestSwitchStateStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - swithState := buildSwitchState(baseWorkflow, "start state") - defaultCondition := buildDefaultCondition(swithState) - buildEndByDefaultCondition(defaultCondition, true, false) - - dataCondition := buildDataCondition(swithState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildEventCondition(model, &model.States[0], "event condition", "event 1") - buildEndByEventCondition(&model.States[0].SwitchState.EventConditions[0], true, false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDefaultConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - buildSwitchState(baseWorkflow, "start state") - buildDefaultCondition(&baseWorkflow.States[0]) - - buildDataCondition(&baseWorkflow.States[0], "data condition 1", "1=1") - buildEndByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[0], true, false) - buildDataCondition(&baseWorkflow.States[0], "data condition 2", "1=1") - - 
buildOperationState(baseWorkflow, "end state") - buildEndByState(&baseWorkflow.States[1], true, false) - buildActionByOperationState(&baseWorkflow.States[1], "action 1") - buildFunctionRef(baseWorkflow, &baseWorkflow.States[1].OperationState.Actions[0], "function 1") - - buildTransitionByDefaultCondition(&baseWorkflow.States[0].SwitchState.DefaultCondition, &baseWorkflow.States[1]) - buildTransitionByDataCondition(&baseWorkflow.States[0].SwitchState.DataConditions[1], &baseWorkflow.States[1], false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestSwitchStateTimeoutStructLevelValidation(t *testing.T) { -} - -func TestEventConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 2) - - // switch state - switchState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(switchState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // event condition 1 - _, eventCondition := buildEventCondition(baseWorkflow, switchState, "data condition 1", "event 1") - buildEndByEventCondition(eventCondition, true, false) - - // event condition 2 - _, eventCondition2 := buildEventCondition(baseWorkflow, switchState, "data condition 2", "event 2") - buildEndByEventCondition(eventCondition2, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - // trasition switch state to operation state - buildTransitionByEventCondition(eventCondition, operationState, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].EventRef = "event not found" - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].eventRef don't exist "event not found"`, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.EventConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByEventCondition(&model.States[0].SwitchState.EventConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.eventConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataConditionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - // switch 
state - swithcState := buildSwitchState(baseWorkflow, "start state") - - // default condition - defaultCondition := buildDefaultCondition(swithcState) - buildEndByDefaultCondition(defaultCondition, true, false) - - // data condition - dataCondition := buildDataCondition(swithcState, "data condition 1", "1=1") - buildEndByDataCondition(dataCondition, true, false) - - // operation state - operationState := buildOperationState(baseWorkflow, "end state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].SwitchState.DataConditions[0].End = nil - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - buildTransitionByDataCondition(&model.States[0].SwitchState.DataConditions[0], &model.States[1], false) - return *model - }, - Err: `workflow.states[0].switchState.dataConditions[0].transition exclusive`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/workflow.go b/model/workflow.go deleted file mode 100644 index aa72d1f..0000000 --- a/model/workflow.go +++ /dev/null @@ -1,591 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "bytes" - "encoding/json" - "errors" - - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -// InvokeKind defines how the target is invoked. -type InvokeKind string - -func (i InvokeKind) KindValues() []string { - return []string{ - string(InvokeKindSync), - string(InvokeKindAsync), - } -} - -func (i InvokeKind) String() string { - return string(i) -} - -const ( - // InvokeKindSync meaning that worfklow execution should wait until the target completes. - InvokeKindSync InvokeKind = "sync" - // InvokeKindAsync meaning that workflow execution should just invoke the target and should not wait until its - // completion. - InvokeKindAsync InvokeKind = "async" -) - -// ActionMode specifies how actions are to be performed. 
-type ActionMode string - -func (i ActionMode) KindValues() []string { - return []string{ - string(ActionModeSequential), - string(ActionModeParallel), - } -} - -func (i ActionMode) String() string { - return string(i) -} - -const ( - // ActionModeSequential specifies actions should be performed in sequence - ActionModeSequential ActionMode = "sequential" - - // ActionModeParallel specifies actions should be performed in parallel - ActionModeParallel ActionMode = "parallel" -) - -const ( - // UnlimitedTimeout description for unlimited timeouts - UnlimitedTimeout = "unlimited" -) - -type ExpressionLangType string - -func (i ExpressionLangType) KindValues() []string { - return []string{ - string(JqExpressionLang), - string(JsonPathExpressionLang), - string(CELExpressionLang), - } -} - -func (i ExpressionLangType) String() string { - return string(i) -} - -const ( - //JqExpressionLang ... - JqExpressionLang ExpressionLangType = "jq" - - // JsonPathExpressionLang ... - JsonPathExpressionLang ExpressionLangType = "jsonpath" - - // CELExpressionLang - CELExpressionLang ExpressionLangType = "cel" -) - -// BaseWorkflow describes the partial Workflow definition that does not rely on generic interfaces -// to make it easy for custom unmarshalers implementations to unmarshal the common data structure. -// +builder-gen:new-call=ApplyDefault -type BaseWorkflow struct { - // Workflow unique identifier - // +optional - ID string `json:"id,omitempty" validate:"required_without=Key"` - // Key Domain-specific workflow identifier - // +optional - Key string `json:"key,omitempty" validate:"required_without=ID"` - // Workflow name - Name string `json:"name,omitempty"` - // Workflow description. - // +optional - Description string `json:"description,omitempty"` - // Workflow version. - // +optional - Version string `json:"version" validate:"omitempty,min=1"` - // Workflow start definition. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Start *Start `json:"start,omitempty"` - // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important - // qualities. - // +optional - Annotations []string `json:"annotations,omitempty"` - // DataInputSchema URI or Object of the JSON Schema used to validate the workflow data input - // +optional - DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` - // Serverless Workflow schema version - // +kubebuilder:validation:Required - // +kubebuilder:default="0.8" - SpecVersion string `json:"specVersion" validate:"required"` - // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc, - // inside your Workflow Expressions. - // +optional - Secrets Secrets `json:"secrets,omitempty" validate:"unique"` - // Constants Workflow constants are used to define static, and immutable, data which is available to - // Workflow Expressions. - // +optional - Constants *Constants `json:"constants,omitempty"` - // Identifies the expression language used for workflow expressions. Default is 'jq'. - // +kubebuilder:validation:Enum=jq;jsonpath;cel - // +kubebuilder:default=jq - // +optional - ExpressionLang ExpressionLangType `json:"expressionLang,omitempty" validate:"required,oneofkind"` - // Defines the workflow default timeout settings. - // +optional - Timeouts *Timeouts `json:"timeouts,omitempty"` - // Defines checked errors that can be explicitly handled during workflow execution. 
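
Aside: the enum-like types above (InvokeKind, ActionMode, ExpressionLangType) all expose KindValues() and String() so the "oneofkind" validate tag can check a field against its allowed literals. A minimal sketch of that check; the Kind interface name and the standalone oneOfKind helper are illustrative, not part of this patch:

package main

import "fmt"

// Kind is an illustrative stand-in for what the "oneofkind" tag relies on:
// a type that can list its allowed values and print its own value.
type Kind interface {
	KindValues() []string
	String() string
}

// oneOfKind reports whether v is one of its own declared values,
// which is essentially the check the "oneofkind" validation tag performs.
func oneOfKind(v Kind) bool {
	for _, allowed := range v.KindValues() {
		if v.String() == allowed {
			return true
		}
	}
	return false
}

// Local mirror of the InvokeKind type shown above, kept self-contained here.
type InvokeKind string

func (i InvokeKind) KindValues() []string { return []string{"sync", "async"} }
func (i InvokeKind) String() string       { return string(i) }

func main() {
	fmt.Println(oneOfKind(InvokeKind("async")))   // true
	fmt.Println(oneOfKind(InvokeKind("invalid"))) // false
}
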
- // +optional - Errors Errors `json:"errors,omitempty" validate:"unique=Name,dive"` - // If "true", workflow instances is not terminated when there are no active execution paths. - // Instance can be terminated with "terminate end definition" or reaching defined "workflowExecTimeout" - // +optional - KeepActive bool `json:"keepActive,omitempty"` - // Metadata custom information shared with the runtime. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Metadata Metadata `json:"metadata,omitempty"` - // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false - // +optional - AutoRetries bool `json:"autoRetries,omitempty"` - // Auth definitions can be used to define authentication information that should be applied to resources defined - // in the operation property of function definitions. It is not used as authentication information for the - // function invocation, but just to access the resource containing the function invocation information. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Auth Auths `json:"auth,omitempty" validate:"unique=Name,dive"` -} - -// ApplyDefault set the default values for Workflow -func (w *BaseWorkflow) ApplyDefault() { - w.SpecVersion = "0.8" - w.ExpressionLang = JqExpressionLang -} - -type Auths []Auth - -type authsUnmarshal Auths - -// UnmarshalJSON implements json.Unmarshaler -func (r *Auths) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("auth", data, (*authsUnmarshal)(r)) -} - -type Errors []Error - -type errorsUnmarshal Errors - -// UnmarshalJSON implements json.Unmarshaler -func (e *Errors) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("errors", data, (*errorsUnmarshal)(e)) -} - -// Workflow base definition -// +builder-gen:embedded-ignore-method=BaseWorkflow -type Workflow struct { - BaseWorkflow `json:",inline"` - // +kubebuilder:pruning:PreserveUnknownFields - States States `json:"states" validate:"min=1,unique=Name,dive"` - // +optional - Events Events `json:"events,omitempty" validate:"unique=Name,dive"` - // +optional - Functions Functions `json:"functions,omitempty" validate:"unique=Name,dive"` - // +optional - Retries Retries `json:"retries,omitempty" validate:"unique=Name,dive"` -} - -type workflowUnmarshal Workflow - -// UnmarshalJSON implementation for json Unmarshal function for the Workflow type -func (w *Workflow) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - err := util.UnmarshalObject("workflow", data, (*workflowUnmarshal)(w)) - if err != nil { - return err - } - - if w.Start == nil && len(w.States) > 0 { - w.Start = &Start{ - StateName: w.States[0].Name, - } - } - - return nil -} - -// States ... 
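
For reference, the Workflow unmarshaler above means a minimal definition round-trips with defaults filled in: if "start" is omitted the first state is used, and ApplyDefault supplies specVersion and expressionLang. A small sketch against the v2 model package that this patch removes; the "greeting"/"say-hello" names are illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	data := []byte(`{
		"id": "greeting",
		"states": [{"name": "say-hello", "type": "operation", "end": true}]
	}`)

	var w model.Workflow
	if err := json.Unmarshal(data, &w); err != nil {
		panic(err)
	}

	// "start" was omitted, so UnmarshalJSON points it at the first state,
	// and ApplyDefault fills in specVersion and expressionLang.
	fmt.Println(w.Start.StateName, w.SpecVersion, w.ExpressionLang) // say-hello 0.8 jq
}
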
-// +kubebuilder:validation:MinItems=1 -type States []State - -type statesUnmarshal States - -// UnmarshalJSON implements json.Unmarshaler -func (s *States) UnmarshalJSON(data []byte) error { - return util.UnmarshalObject("states", data, (*statesUnmarshal)(s)) -} - -type Events []Event - -type eventsUnmarshal Events - -// UnmarshalJSON implements json.Unmarshaler -func (e *Events) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("events", data, (*eventsUnmarshal)(e)) -} - -type Functions []Function - -type functionsUnmarshal Functions - -// UnmarshalJSON implements json.Unmarshaler -func (f *Functions) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("functions", data, (*functionsUnmarshal)(f)) -} - -type Retries []Retry - -type retriesUnmarshal Retries - -// UnmarshalJSON implements json.Unmarshaler -func (r *Retries) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("retries", data, (*retriesUnmarshal)(r)) -} - -// Timeouts ... -type Timeouts struct { - // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should - // be 'unlimited'. - // +optional - WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` - // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format). - // +optional - StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` - // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format). - // +optional - ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` - // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format). - // +optional - BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` - // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format). - // +optional - EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` -} - -type timeoutsUnmarshal Timeouts - -// UnmarshalJSON implements json.Unmarshaler -func (t *Timeouts) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("timeouts", data, (*timeoutsUnmarshal)(t)) -} - -// WorkflowExecTimeout property defines the workflow execution timeout. It is defined using the ISO 8601 duration -// format. If not defined, the workflow execution should be given "unlimited" amount of time to complete. -// +builder-gen:new-call=ApplyDefault -type WorkflowExecTimeout struct { - // Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'. - // +kubebuilder:default=unlimited - Duration string `json:"duration" validate:"required,min=1,iso8601duration"` - // If false, workflow instance is allowed to finish current execution. If true, current workflow execution - // is stopped immediately. Default is false. - // +optional - Interrupt bool `json:"interrupt,omitempty"` - // Name of a workflow state to be executed before workflow instance is terminated. 
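
Like auth, events, functions and retries, the Timeouts block above goes through UnmarshalObjectOrFile, so it can be supplied inline or as a URI to an external file. A trimmed sketch of the inline form against the same v2 model package; the durations are illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	data := []byte(`{
		"workflowExecTimeout": {"duration": "PT1H", "interrupt": true},
		"actionExecTimeout": "PT5M"
	}`)

	var t model.Timeouts
	if err := json.Unmarshal(data, &t); err != nil {
		panic(err)
	}
	// workflowExecTimeout unmarshals as an object here; a bare string would
	// also be accepted and treated as the duration.
	fmt.Println(t.WorkflowExecTimeout.Duration, t.ActionExecTimeout) // PT1H PT5M
}
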
- // +optional - RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"` -} - -type workflowExecTimeoutUnmarshal WorkflowExecTimeout - -// UnmarshalJSON implements json.Unmarshaler -func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error { - w.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("workflowExecTimeout", data, &w.Duration, (*workflowExecTimeoutUnmarshal)(w)) -} - -// ApplyDefault set the default values for Workflow Exec Timeout -func (w *WorkflowExecTimeout) ApplyDefault() { - w.Duration = UnlimitedTimeout -} - -// Error declaration for workflow definitions -type Error struct { - // Name Domain-specific error name. - // +kubebuilder:validation:Required - Name string `json:"name" validate:"required"` - // Code OnError code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. - // Should not be defined if error is set to '*'. - // +optional - Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // OnError description. - // +optional - Description string `json:"description,omitempty"` -} - -// Start definition -type Start struct { - // Name of the starting workflow state - // +kubebuilder:validation:Required - StateName string `json:"stateName" validate:"required"` - // Define the recurring time intervals or cron expressions at which workflow instances should be automatically - // started. - // +optional - Schedule *Schedule `json:"schedule,omitempty" validate:"omitempty"` -} - -type startUnmarshal Start - -// UnmarshalJSON implements json.Unmarshaler -func (s *Start) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("start", data, &s.StateName, (*startUnmarshal)(s)) -} - -// Schedule ... -type Schedule struct { - // TODO Interval is required if Cron is not set and vice-versa, make a exclusive validation - // A recurring time interval expressed in the derivative of ISO 8601 format specified below. Declares that - // workflow instances should be automatically created at the start of each time interval in the series. - // +optional - Interval string `json:"interval,omitempty"` - // Cron expression defining when workflow instances should be automatically created. - // optional - Cron *Cron `json:"cron,omitempty"` - // Timezone name used to evaluate the interval & cron-expression. If the interval specifies a date-time - // w/ timezone then proper timezone conversion will be applied. (default: UTC). - // +optional - Timezone string `json:"timezone,omitempty"` -} - -type scheduleUnmarshal Schedule - -// UnmarshalJSON implements json.Unmarshaler -func (s *Schedule) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("schedule", data, &s.Interval, (*scheduleUnmarshal)(s)) -} - -// Cron ... -type Cron struct { - // Cron expression describing when the workflow instance should be created (automatically). - // +kubebuilder:validation:Required - Expression string `json:"expression" validate:"required"` - // Specific date and time (ISO 8601 format) when the cron expression is no longer valid. - // +optional - ValidUntil string `json:"validUntil,omitempty" validate:"omitempty,iso8601datetime"` -} - -type cronUnmarshal Cron - -// UnmarshalJSON custom unmarshal function for Cron -func (c *Cron) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("cron", data, &c.Expression, (*cronUnmarshal)(c)) -} - -// Transition Serverless workflow states can have one or more incoming and outgoing transitions (from/to other states). 
-// Each state can define a transition definition that is used to determine which state to transition to next. -type Transition struct { - stateParent *State `json:"-"` // used in validation - // Name of the state to transition to next. - // +kubebuilder:validation:Required - NextState string `json:"nextState" validate:"required,min=1"` - // Array of producedEvent definitions. Events to be produced before the transition takes place. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty" validate:"omitempty,dive"` - // If set to true, triggers workflow compensation before this transition is taken. Default is false. - // +kubebuilder:default=false - // +optional - Compensate bool `json:"compensate,omitempty"` -} - -type transitionUnmarshal Transition - -// UnmarshalJSON implements json.Unmarshaler -func (t *Transition) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("transition", data, &t.NextState, (*transitionUnmarshal)(t)) -} - -// OnError ... -type OnError struct { - // ErrorRef Reference to a unique workflow error definition. Used of errorRefs is not used - ErrorRef string `json:"errorRef,omitempty"` - // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used - ErrorRefs []string `json:"errorRefs,omitempty" validate:"omitempty,unique"` - // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if - // retries were unsuccessful. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - Transition *Transition `json:"transition,omitempty"` - // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if - // retries were unsuccessful. - // +kubebuilder:validation:Schemaless - // +kubebuilder:pruning:PreserveUnknownFields - // +optional - End *End `json:"end,omitempty"` -} - -// End definition -type End struct { - // If true, completes all execution flows in the given workflow instance. - // +optional - Terminate bool `json:"terminate,omitempty"` - // Array of producedEvent definitions. Defines events that should be produced. - // +optional - ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` - // If set to true, triggers workflow compensation before workflow execution completes. Default is false. - // +optional - Compensate bool `json:"compensate,omitempty"` - // Defines that current workflow execution should stop, and execution should continue as a new workflow - // instance of the provided id - // +optional - ContinueAs *ContinueAs `json:"continueAs,omitempty"` -} - -type endUnmarshal End - -// UnmarshalJSON implements json.Unmarshaler -func (e *End) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("end", data, &e.Terminate, (*endUnmarshal)(e)) -} - -// ContinueAs can be used to stop the current workflow execution and start another one (of the same or a different type) -type ContinueAs struct { - // Unique id of the workflow to continue execution as. - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Version of the workflow to continue execution as. - // +optional - Version string `json:"version,omitempty"` - // If string type, an expression which selects parts of the states data output to become the workflow data input of - // continued execution. 
If object type, a custom object to become the workflow data input of the continued execution - // +optional - Data Object `json:"data,omitempty"` - // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. - // Overwrites any specific settings set by that workflow - // +optional - WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` -} - -type continueAsUnmarshal ContinueAs - -// UnmarshalJSON implements json.Unmarshaler -func (c *ContinueAs) UnmarshalJSON(data []byte) error { - return util.UnmarshalPrimitiveOrObject("continueAs", data, &c.WorkflowID, (*continueAsUnmarshal)(c)) -} - -// ProduceEvent Defines the event (CloudEvent format) to be produced when workflow execution completes or during a -// workflow transitions. The eventRef property must match the name of one of the defined produced events in the -// events definition. -type ProduceEvent struct { - // Reference to a defined unique event name in the events definition - // +kubebuilder:validation:Required - EventRef string `json:"eventRef" validate:"required"` - // If String, expression which selects parts of the states data output to become the data of the produced event. - // If object a custom object to become the data of produced event. - // +optional - Data Object `json:"data,omitempty"` - // Add additional event extension context attributes. - // +optional - ContextAttributes map[string]string `json:"contextAttributes,omitempty"` -} - -// StateDataFilter ... -type StateDataFilter struct { - // Workflow expression to filter the state data input - Input string `json:"input,omitempty"` - // Workflow expression that filters the state data output - Output string `json:"output,omitempty"` -} - -// DataInputSchema Used to validate the workflow data input against a defined JSON Schema -// +builder-gen:new-call=ApplyDefault -type DataInputSchema struct { - // +kubebuilder:validation:Required - Schema *Object `json:"schema" validate:"required"` - // +kubebuilder:validation:Required - FailOnValidationErrors bool `json:"failOnValidationErrors"` -} - -type dataInputSchemaUnmarshal DataInputSchema - -// UnmarshalJSON implements json.Unmarshaler -func (d *DataInputSchema) UnmarshalJSON(data []byte) error { - d.ApplyDefault() - - // expected: data = "{\"key\": \"value\"}" - // data = {"key": "value"} - // data = "file://..." - // data = { "schema": "{\"key\": \"value\"}", "failOnValidationErrors": true } - // data = { "schema": {"key": "value"}, "failOnValidationErrors": true } - // data = { "schema": "file://...", "failOnValidationErrors": true } - - schemaString := "" - err := util.UnmarshalPrimitiveOrObject("dataInputSchema", data, &schemaString, (*dataInputSchemaUnmarshal)(d)) - if err != nil { - return err - } - - if d.Schema != nil { - if d.Schema.Type == Map { - return nil - - } else if d.Schema.Type == String { - schemaString = d.Schema.StringValue - - } else { - return errors.New("invalid dataInputSchema must be a string or object") - } - } - - if schemaString != "" { - data = []byte(schemaString) - if bytes.TrimSpace(data)[0] != '{' { - data = []byte("\"" + schemaString + "\"") - } - } - - d.Schema = new(Object) - return util.UnmarshalObjectOrFile("schema", data, &d.Schema) -} - -// ApplyDefault set the default values for Data Input Schema -func (d *DataInputSchema) ApplyDefault() { - d.FailOnValidationErrors = true -} - -// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your -// Workflow Expressions. 
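
The comment block inside DataInputSchema.UnmarshalJSON above enumerates the accepted shapes. A short sketch of two of them, the bare schema object and the wrapped form, against the v2 model package removed by this patch; the schema content is illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	// Bare schema object: the whole object becomes the schema and
	// failOnValidationErrors keeps its default of true.
	var a model.DataInputSchema
	if err := json.Unmarshal([]byte(`{"type": "object"}`), &a); err != nil {
		panic(err)
	}

	// Wrapped form: the schema sits under "schema" and the flag is explicit.
	var b model.DataInputSchema
	if err := json.Unmarshal([]byte(`{"schema": {"type": "object"}, "failOnValidationErrors": false}`), &b); err != nil {
		panic(err)
	}

	fmt.Println(a.FailOnValidationErrors, b.FailOnValidationErrors) // true false
}
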
-type Secrets []string - -type secretsUnmarshal Secrets - -// UnmarshalJSON implements json.Unmarshaler -func (s *Secrets) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("secrets", data, (*secretsUnmarshal)(s)) -} - -// Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. -type Constants struct { - // Data represents the generic structure of the constants value - // +optional - Data ConstantsData `json:",omitempty"` -} - -// UnmarshalJSON implements json.Unmarshaler -func (c *Constants) UnmarshalJSON(data []byte) error { - return util.UnmarshalObjectOrFile("constants", data, &c.Data) -} - -type ConstantsData map[string]json.RawMessage diff --git a/model/workflow_ref.go b/model/workflow_ref.go deleted file mode 100644 index c1fd1ce..0000000 --- a/model/workflow_ref.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "github.com/serverlessworkflow/sdk-go/v2/util" - -// CompletionType define on how to complete branch execution. -type OnParentCompleteType string - -func (i OnParentCompleteType) KindValues() []string { - return []string{ - string(OnParentCompleteTypeTerminate), - string(OnParentCompleteTypeContinue), - } -} - -func (i OnParentCompleteType) String() string { - return string(i) -} - -const ( - OnParentCompleteTypeTerminate OnParentCompleteType = "terminate" - OnParentCompleteTypeContinue OnParentCompleteType = "continue" -) - -// WorkflowRef holds a reference for a workflow definition -// +builder-gen:new-call=ApplyDefault -type WorkflowRef struct { - // Sub-workflow unique id - // +kubebuilder:validation:Required - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - // +optional - Version string `json:"version,omitempty"` - // Specifies if the subflow should be invoked sync or async. - // Defaults to sync. - // +kubebuilder:validation:Enum=async;sync - // +kubebuilder:default=sync - // +optional - Invoke InvokeKind `json:"invoke,omitempty" validate:"required,oneofkind"` - // onParentComplete specifies how subflow execution should behave when parent workflow completes if invoke - // is 'async'. Defaults to terminate. 
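
WorkflowRef is another string-or-object field: a bare string is taken as the workflowId, and ApplyDefault fills invoke=sync and onParentComplete=terminate. The unmarshal test that follows covers the same cases in table form; this is just a minimal sketch against the v2 model package, with an illustrative workflow id:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	var ref model.WorkflowRef
	if err := json.Unmarshal([]byte(`"handle-approval"`), &ref); err != nil {
		panic(err)
	}
	// A plain string only sets the workflow id; the rest comes from ApplyDefault.
	fmt.Println(ref.WorkflowID, ref.Invoke, ref.OnParentComplete) // handle-approval sync terminate
}
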
- // +kubebuilder:validation:Enum=terminate;continue - // +kubebuilder:default=terminate - OnParentComplete OnParentCompleteType `json:"onParentComplete,omitempty" validate:"required,oneofkind"` -} - -type workflowRefUnmarshal WorkflowRef - -// UnmarshalJSON implements json.Unmarshaler -func (s *WorkflowRef) UnmarshalJSON(data []byte) error { - s.ApplyDefault() - return util.UnmarshalPrimitiveOrObject("subFlowRef", data, &s.WorkflowID, (*workflowRefUnmarshal)(s)) -} - -// ApplyDefault set the default values for Workflow Ref -func (s *WorkflowRef) ApplyDefault() { - s.Invoke = InvokeKindSync - s.OnParentComplete = "terminate" -} diff --git a/model/workflow_ref_test.go b/model/workflow_ref_test.go deleted file mode 100644 index 4a69fb5..0000000 --- a/model/workflow_ref_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestWorkflowRefUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowRef - err string - } - testCases := []testCase{ - { - desp: "normal object test", - data: `{"workflowId": "1", "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "2", - Invoke: InvokeKindAsync, - OnParentComplete: "continue", - }, - err: ``, - }, - { - desp: "normal object test & defaults", - data: `{"workflowId": "1"}`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "normal string test", - data: `"1"`, - expect: WorkflowRef{ - WorkflowID: "1", - Version: "", - Invoke: InvokeKindSync, - OnParentComplete: "terminate", - }, - err: ``, - }, - { - desp: "empty data", - data: ` `, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid string format", - data: `"1`, - expect: WorkflowRef{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid json format", - data: `{"workflowId": 1, "version": "2", "invoke": "async", "onParentComplete": "continue"}`, - expect: WorkflowRef{}, - err: "subFlowRef.workflowId must be string", - }, - { - desp: "invalid string or object", - data: `1`, - expect: WorkflowRef{}, - err: `subFlowRef must be string or object`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowRef - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/workflow_ref_validator_test.go b/model/workflow_ref_validator_test.go deleted file mode 100644 index 96a7f9c..0000000 --- a/model/workflow_ref_validator_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed 
under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import "testing" - -func TestWorkflowRefStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(&baseWorkflow.States[0], true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].OperationState.Actions[0].FunctionRef = nil - baseWorkflow.States[0].OperationState.Actions[0].SubFlowRef = &WorkflowRef{ - WorkflowID: "workflowID", - Invoke: InvokeKindSync, - OnParentComplete: OnParentCompleteTypeTerminate, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.WorkflowID = "" - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.workflowID is required -workflow.states[0].actions[0].subFlowRef.invoke is required -workflow.states[0].actions[0].subFlowRef.onParentComplete is required`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OperationState.Actions[0].SubFlowRef.Invoke = "invalid invoce" - model.States[0].OperationState.Actions[0].SubFlowRef.OnParentComplete = "invalid parent complete" - return *model - }, - Err: `workflow.states[0].actions[0].subFlowRef.invoke need by one of [sync async] -workflow.states[0].actions[0].subFlowRef.onParentComplete need by one of [terminate continue]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} diff --git a/model/workflow_test.go b/model/workflow_test.go deleted file mode 100644 index a5aa42a..0000000 --- a/model/workflow_test.go +++ /dev/null @@ -1,677 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "testing" - - "github.com/serverlessworkflow/sdk-go/v2/util" - "github.com/stretchr/testify/assert" -) - -func TestWorkflowStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Workflow - err string - } - testCases := []testCase{ - { - desp: "start string", - data: `{"start": "start state name"}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{}, - }, - err: ``, - }, - { - desp: "start empty and use the first state", - data: `{"states": [{"name": "start state name", "type": "operation"}]}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - Start: &Start{ - StateName: "start state name", - }, - }, - States: []State{ - { - BaseState: BaseState{ - Name: "start state name", - Type: StateTypeOperation, - }, - OperationState: &OperationState{ - ActionMode: "sequential", - }, - }, - }, - }, - err: ``, - }, - { - desp: "start empty and states empty", - data: `{"states": []}`, - expect: Workflow{ - BaseWorkflow: BaseWorkflow{ - SpecVersion: "0.8", - ExpressionLang: "jq", - }, - States: []State{}, - }, - err: ``, - }, - } - - for _, tc := range testCases[1:] { - t.Run(tc.desp, func(t *testing.T) { - var v Workflow - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestContinueAsUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect ContinueAs - err string - } - testCases := []testCase{ - { - desp: "string", - data: `"1"`, - expect: ContinueAs{ - WorkflowID: "1", - }, - err: ``, - }, - { - desp: "object all field set", - data: `{"workflowId": "1", "version": "2", "data": "3", "workflowExecTimeout": {"duration": "PT1H", "interrupt": true, "runBefore": "4"}}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "2", - Data: FromString("3"), - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "PT1H", - Interrupt: true, - RunBefore: "4", - }, - }, - err: ``, - }, - { - desp: "object optional field unset", - data: `{"workflowId": "1"}`, - expect: ContinueAs{ - WorkflowID: "1", - Version: "", - Data: Object{}, - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "", - Interrupt: false, - RunBefore: "", - }, - }, - err: ``, - }, - { - desp: "invalid string format", - data: `"{`, - expect: ContinueAs{}, - err: `unexpected end of JSON input`, - }, - { - desp: "invalid object format", - data: `{"workflowId": 1}`, - expect: ContinueAs{}, - err: `continueAs.workflowId must be string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v ContinueAs - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestEndUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect End - err string - } - testCases := []testCase{ - { - desp: "bool success", - data: `true`, - expect: End{ - Terminate: true, - }, - err: ``, - }, - { - desp: "string fail", - data: `"true"`, - expect: End{}, - err: `end must be bool or object`, - }, - { - desp: `object success`, - data: `{"terminate": true}`, - expect: End{ - Terminate: 
true, - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"terminate": "true"}`, - expect: End{ - Terminate: true, - }, - err: `end.terminate must be bool`, - }, - { - desp: `object key invalid`, - data: `{"terminate_parameter_invalid": true}`, - expect: End{}, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v End - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestWorkflowExecTimeoutUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect WorkflowExecTimeout - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"PT15M"`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: ``, - }, - { - desp: "string fail", - data: `PT15M`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: `invalid character 'P' looking for beginning of value`, - }, - { - desp: `object success`, - data: `{"duration": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"duration": PT15M}`, - expect: WorkflowExecTimeout{ - Duration: "PT15M", - }, - err: `invalid character 'P' looking for beginning of value`, - }, - { - desp: `object key invalid`, - data: `{"duration_invalid": "PT15M"}`, - expect: WorkflowExecTimeout{ - Duration: "unlimited", - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v WorkflowExecTimeout - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestStartUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Start - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"start state"`, - expect: Start{ - StateName: "start state", - }, - err: ``, - }, - { - desp: "string fail", - data: `start state`, - expect: Start{ - StateName: "start state", - }, - err: `invalid character 's' looking for beginning of value`, - }, - { - desp: `object success`, - data: `{"stateName": "start state"}`, - expect: Start{ - StateName: "start state", - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"stateName": start state}`, - expect: Start{ - StateName: "start state", - }, - err: `invalid character 's' looking for beginning of value`, - }, - { - desp: `object key invalid`, - data: `{"stateName_invalid": "start state"}`, - expect: Start{ - StateName: "", - }, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Start - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestCronUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Cron - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"0 15,30,45 * ? * *"`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: ``, - }, - { - desp: "string fail", - data: `0 15,30,45 * ? * *`, - expect: Cron{ - Expression: "0 15,30,45 * ? 
* *", - }, - err: `invalid character '1' after top-level value`, - }, - { - desp: `object success`, - data: `{"expression": "0 15,30,45 * ? * *"}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"expression": "0 15,30,45 * ? * *}`, - expect: Cron{ - Expression: "0 15,30,45 * ? * *", - }, - err: `unexpected end of JSON input`, - }, - { - desp: `object key invalid`, - data: `{"expression_invalid": "0 15,30,45 * ? * *"}`, - expect: Cron{}, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Cron - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestTransitionUnmarshalJSON(t *testing.T) { - type testCase struct { - desp string - data string - expect Transition - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: `"next state"`, - expect: Transition{ - NextState: "next state", - }, - err: ``, - }, - { - desp: `object success`, - data: `{"nextState": "next state"}`, - expect: Transition{ - NextState: "next state", - }, - err: ``, - }, - { - desp: `object fail`, - data: `{"nextState": "next state}`, - expect: Transition{ - NextState: "next state", - }, - err: `unexpected end of JSON input`, - }, - { - desp: `object key invalid`, - data: `{"nextState_invalid": "next state"}`, - expect: Transition{}, - err: ``, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Transition - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} - -func TestDataInputSchemaUnmarshalJSON(t *testing.T) { - - var schemaName Object - err := json.Unmarshal([]byte("{\"key\": \"value\"}"), &schemaName) - if !assert.NoError(t, err) { - return - } - - type testCase struct { - desp string - data string - expect DataInputSchema - err string - } - - testCases := []testCase{ - { - desp: "string success", - data: "{\"key\": \"value\"}", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: "string fail", - data: "{\"key\": }", - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `invalid character '}' looking for beginning of value`, - }, - { - desp: `object success (without quotes)`, - data: `{"key": "value"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success`, - data: `{"schema": "{\"key\": \"value\"}"}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object success (without quotes)`, - data: `{"schema": {"key": "value"}}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: ``, - }, - { - desp: `schema object fail`, - data: `{"schema": "schema name}`, - expect: DataInputSchema{ - Schema: &schemaName, - FailOnValidationErrors: true, - }, - err: `unexpected end of JSON input`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v DataInputSchema - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err, tc.desp) - assert.Regexp(t, tc.err, err, tc.desp) - return - } - - 
assert.NoError(t, err, tc.desp) - assert.Equal(t, tc.expect.Schema, v.Schema, tc.desp) - assert.Equal(t, tc.expect.FailOnValidationErrors, v.FailOnValidationErrors, tc.desp) - }) - } -} - -func TestConstantsUnmarshalJSON(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"testkey":"testvalue"}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - util.HttpClient = *server.Client() - - type testCase struct { - desp string - data string - expect Constants - err string - } - testCases := []testCase{ - { - desp: "object success", - data: `{"testkey":"testvalue}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `unexpected end of JSON input`, - }, - { - desp: "object success", - data: `[]`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - // TODO: improve message: field is empty - err: `constants must be string or object`, - }, - { - desp: "object success", - data: `{"testkey":"testvalue"}`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: ``, - }, - { - desp: "file success", - data: fmt.Sprintf(`"%s/test.json"`, server.URL), - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: ``, - }, - { - desp: "file success", - data: `"uri_invalid"`, - expect: Constants{ - Data: ConstantsData{ - "testkey": []byte(`"testvalue"`), - }, - }, - err: `file not found: "uri_invalid"`, - }, - } - - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - var v Constants - err := json.Unmarshal([]byte(tc.data), &v) - - if tc.err != "" { - assert.Error(t, err) - assert.Equal(t, tc.err, err.Error()) - return - } - - assert.NoError(t, err) - assert.Equal(t, tc.expect, v) - }) - } -} diff --git a/model/workflow_validator.go b/model/workflow_validator.go deleted file mode 100644 index fd3d7bb..0000000 --- a/model/workflow_validator.go +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
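
The constants test above exercises both the inline-object form and the remote-file form handled by UnmarshalObjectOrFile. A trimmed sketch of the inline case against the v2 model package; the "AGE" constant is illustrative, and the URI form would simply replace the object with a quoted URL, as the test's httptest server does:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v2/model"
)

func main() {
	var c model.Constants
	if err := json.Unmarshal([]byte(`{"AGE": {"min": 18, "max": 65}}`), &c); err != nil {
		panic(err)
	}
	// Each top-level key is kept as raw JSON, to be referenced later from
	// workflow expressions.
	fmt.Println(string(c.Data["AGE"])) // {"min": 18, "max": 65}
}
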
- -package model - -import ( - "context" - - validator "github.com/go-playground/validator/v10" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -type contextValueKey string - -const ValidatorContextValue contextValueKey = "value" - -type WorkflowValidator func(mapValues ValidatorContext, sl validator.StructLevel) - -func ValidationWrap(fnCtx WorkflowValidator) validator.StructLevelFuncCtx { - return func(ctx context.Context, structLevel validator.StructLevel) { - if fnCtx != nil { - if mapValues, ok := ctx.Value(ValidatorContextValue).(ValidatorContext); ok { - fnCtx(mapValues, structLevel) - } - } - } -} - -// +builder-gen:ignore=true -type ValidatorContext struct { - States map[string]State - Functions map[string]Function - Events map[string]Event - Retries map[string]Retry - Errors map[string]Error -} - -func (c *ValidatorContext) init(workflow *Workflow) { - c.States = make(map[string]State, len(workflow.States)) - for _, state := range workflow.States { - c.States[state.BaseState.Name] = state - } - - c.Functions = make(map[string]Function, len(workflow.Functions)) - for _, function := range workflow.Functions { - c.Functions[function.Name] = function - } - - c.Events = make(map[string]Event, len(workflow.Events)) - for _, event := range workflow.Events { - c.Events[event.Name] = event - } - - c.Retries = make(map[string]Retry, len(workflow.Retries)) - for _, retry := range workflow.Retries { - c.Retries[retry.Name] = retry - } - - c.Errors = make(map[string]Error, len(workflow.Errors)) - for _, error := range workflow.Errors { - c.Errors[error.Name] = error - } -} - -func (c *ValidatorContext) ExistState(name string) bool { - if c.States == nil { - return true - } - _, ok := c.States[name] - return ok -} - -func (c *ValidatorContext) ExistFunction(name string) bool { - if c.Functions == nil { - return true - } - _, ok := c.Functions[name] - return ok -} - -func (c *ValidatorContext) ExistEvent(name string) bool { - if c.Events == nil { - return true - } - _, ok := c.Events[name] - return ok -} - -func (c *ValidatorContext) ExistRetry(name string) bool { - if c.Retries == nil { - return true - } - _, ok := c.Retries[name] - return ok -} - -func (c *ValidatorContext) ExistError(name string) bool { - if c.Errors == nil { - return true - } - _, ok := c.Errors[name] - return ok -} - -func NewValidatorContext(object any) context.Context { - contextValue := ValidatorContext{} - - if workflow, ok := object.(*Workflow); ok { - for i := range workflow.States { - s := &workflow.States[i] - if s.BaseState.Transition != nil { - s.BaseState.Transition.stateParent = s - } - for _, onError := range s.BaseState.OnErrors { - if onError.Transition != nil { - onError.Transition.stateParent = s - } - } - if s.Type == StateTypeSwitch { - if s.SwitchState.DefaultCondition.Transition != nil { - s.SwitchState.DefaultCondition.Transition.stateParent = s - } - for _, e := range s.SwitchState.EventConditions { - if e.Transition != nil { - e.Transition.stateParent = s - } - } - for _, d := range s.SwitchState.DataConditions { - if d.Transition != nil { - d.Transition.stateParent = s - } - } - } - } - contextValue.init(workflow) - } - - return context.WithValue(context.Background(), ValidatorContextValue, contextValue) -} - -func init() { - // TODO: create states graph to complex check - - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(onErrorStructLevelValidationCtx), OnError{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(transitionStructLevelValidationCtx), 
Transition{}) - val.GetValidator().RegisterStructValidationCtx(ValidationWrap(startStructLevelValidationCtx), Start{}) -} - -func startStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - start := structLevel.Current().Interface().(Start) - if start.StateName != "" && !ctx.ExistState(start.StateName) { - structLevel.ReportError(start.StateName, "StateName", "stateName", val.TagExists, "") - return - } -} - -func onErrorStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - onError := structLevel.Current().Interface().(OnError) - hasErrorRef := onError.ErrorRef != "" - hasErrorRefs := len(onError.ErrorRefs) > 0 - - if !hasErrorRef && !hasErrorRefs { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagRequired, "") - } else if hasErrorRef && hasErrorRefs { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExclusive, "") - return - } - - if onError.ErrorRef != "" && !ctx.ExistError(onError.ErrorRef) { - structLevel.ReportError(onError.ErrorRef, "ErrorRef", "ErrorRef", val.TagExists, "") - } - - for _, errorRef := range onError.ErrorRefs { - if !ctx.ExistError(errorRef) { - structLevel.ReportError(onError.ErrorRefs, "ErrorRefs", "ErrorRefs", val.TagExists, "") - } - } -} - -func transitionStructLevelValidationCtx(ctx ValidatorContext, structLevel validator.StructLevel) { - // Naive check if transitions exist - transition := structLevel.Current().Interface().(Transition) - if ctx.ExistState(transition.NextState) { - if transition.stateParent != nil { - parentBaseState := transition.stateParent - - if parentBaseState.Name == transition.NextState { - // TODO: Improve recursive check - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagRecursiveState, parentBaseState.Name) - } - - if parentBaseState.UsedForCompensation && !ctx.States[transition.NextState].BaseState.UsedForCompensation { - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionUseForCompensation, "") - } - - if !parentBaseState.UsedForCompensation && ctx.States[transition.NextState].BaseState.UsedForCompensation { - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagTransitionMainWorkflow, "") - } - } - - } else { - structLevel.ReportError(transition.NextState, "NextState", "NextState", val.TagExists, "") - } -} - -func validTransitionAndEnd(structLevel validator.StructLevel, field any, transition *Transition, end *End) { - hasTransition := transition != nil - isEnd := end != nil && (end.Terminate || end.ContinueAs != nil || len(end.ProduceEvents) > 0) // TODO: check the spec continueAs/produceEvents to see how it influences the end - - if !hasTransition && !isEnd { - structLevel.ReportError(field, "Transition", "transition", val.TagRequired, "") - } else if hasTransition && isEnd { - structLevel.ReportError(field, "Transition", "transition", val.TagExclusive, "") - } -} - -func validationNotExclusiveParamters(values []bool) bool { - hasOne := false - hasTwo := false - - for i, val1 := range values { - if val1 { - hasOne = true - for j, val2 := range values { - if i != j && val2 { - hasTwo = true - break - } - } - break - } - } - - return hasOne && hasTwo -} diff --git a/model/workflow_validator_test.go b/model/workflow_validator_test.go deleted file mode 100644 index 2a6b5a0..0000000 --- a/model/workflow_validator_test.go +++ /dev/null @@ -1,544 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// 
Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -func buildWorkflow() *Workflow { - return &Workflow{ - BaseWorkflow: BaseWorkflow{ - ID: "id", - Key: "key", - Name: "name", - SpecVersion: "0.8", - Version: "0.1", - ExpressionLang: JqExpressionLang, - }, - } -} - -func buildEndByState(state *State, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - state.BaseState.End = end - return end -} - -func buildEndByDefaultCondition(defaultCondition *DefaultCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - defaultCondition.End = end - return end -} - -func buildEndByDataCondition(dataCondition *DataCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - dataCondition.End = end - return end -} - -func buildEndByEventCondition(eventCondition *EventCondition, terminate, compensate bool) *End { - end := &End{ - Terminate: terminate, - Compensate: compensate, - } - eventCondition.End = end - return end -} - -func buildStart(workflow *Workflow, state *State) { - start := &Start{ - StateName: state.BaseState.Name, - } - workflow.BaseWorkflow.Start = start -} - -func buildTransitionByState(state, nextState *State, compensate bool) { - state.BaseState.Transition = &Transition{ - NextState: nextState.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDataCondition(dataCondition *DataCondition, state *State, compensate bool) { - dataCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByEventCondition(eventCondition *EventCondition, state *State, compensate bool) { - eventCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - Compensate: compensate, - } -} - -func buildTransitionByDefaultCondition(defaultCondition *DefaultCondition, state *State) { - defaultCondition.Transition = &Transition{ - NextState: state.BaseState.Name, - } -} - -func buildTimeouts(workflow *Workflow) *Timeouts { - timeouts := Timeouts{} - workflow.BaseWorkflow.Timeouts = &timeouts - return workflow.BaseWorkflow.Timeouts -} - -func TestBaseWorkflowStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "id exclude key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "id" - model.Key = "" - return *model - }, - }, - { - Desp: "key exclude id", - Model: func() Workflow { - model := 
baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "key" - return *model - }, - }, - { - Desp: "without id and key", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.ID = "" - model.Key = "" - return *model - }, - Err: `workflow.id required when "workflow.key" is not defined -workflow.key required when "workflow.id" is not defined`, - }, - { - Desp: "oneofkind", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.BaseWorkflow.ExpressionLang = JqExpressionLang + "invalid" - return *model - }, - Err: `workflow.expressionLang need by one of [jq jsonpath cel]`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestContinueAsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.States[0].BaseState.End.ContinueAs = &ContinueAs{ - WorkflowID: "sub workflow", - WorkflowExecTimeout: WorkflowExecTimeout{ - Duration: "P1M", - }, - } - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.End.ContinueAs.WorkflowID = "" - return *model - }, - Err: `workflow.states[0].end.continueAs.workflowID is required`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestOnErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - baseWorkflow.States[0].BaseState.OnErrors = []OnError{{ - ErrorRef: "error 1", - }, { - ErrorRefs: []string{"error 1", "error 2"}, - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef is required`, - }, - { - Desp: "exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[0].ErrorRef = "error 1" - model.States[0].OnErrors[0].ErrorRefs = []string{"error 2"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists and exclusive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef or workflow.states[0].onErrors[0].errorRefs are exclusive`, - }, - { - Desp: "exists errorRef", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "invalid error name" - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRef don't exist "invalid error name"`, - }, - { - Desp: "exists errorRefs", - Model: func() 
Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.OnErrors[0].ErrorRef = "" - model.States[0].BaseState.OnErrors[0].ErrorRefs = []string{"invalid error name"} - return *model - }, - Err: `workflow.states[0].onErrors[0].errorRefs don't exist ["invalid error name"]`, - }, - { - Desp: "duplicate", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].OnErrors[1].ErrorRefs = []string{"error 1", "error 1"} - return *model - }, - Err: `workflow.states[0].onErrors[1].errorRefs has duplicate value`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestStartStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildStart(baseWorkflow, operationState) - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "" - return *model - }, - Err: `workflow.start.stateName is required`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Start.StateName = "start state not found" - return *model - }, - Err: `workflow.start.stateName don't exist "start state not found"`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestTransitionStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - baseWorkflow.States = make(States, 0, 5) - - operationState := buildOperationState(baseWorkflow, "start state") - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - operationState2 := buildOperationState(baseWorkflow, "next state") - buildEndByState(operationState2, true, false) - operationState2.BaseState.CompensatedBy = "compensation next state 1" - action2 := buildActionByOperationState(operationState2, "action 1") - buildFunctionRef(baseWorkflow, action2, "function 2") - - buildTransitionByState(operationState, operationState2, false) - - operationState3 := buildOperationState(baseWorkflow, "compensation next state 1") - operationState3.BaseState.UsedForCompensation = true - action3 := buildActionByOperationState(operationState3, "action 1") - buildFunctionRef(baseWorkflow, action3, "function 3") - - operationState4 := buildOperationState(baseWorkflow, "compensation next state 2") - operationState4.BaseState.UsedForCompensation = true - action4 := buildActionByOperationState(operationState4, "action 1") - buildFunctionRef(baseWorkflow, action4, "function 4") - - buildTransitionByState(operationState3, operationState4, false) - - operationState5 := buildOperationState(baseWorkflow, "compensation next state 3") - buildEndByState(operationState5, true, false) - operationState5.BaseState.UsedForCompensation = true - action5 := buildActionByOperationState(operationState5, "action 5") - buildFunctionRef(baseWorkflow, action5, "function 5") - - buildTransitionByState(operationState4, operationState5, false) - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "state recursive", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = 
model.States[0].BaseState.Name - return *model - }, - Err: `workflow.states[0].transition.nextState can't no be recursive "start state"`, - }, - { - Desp: "exists", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = "invalid next state" - return *model - }, - Err: `workflow.states[0].transition.nextState don't exist "invalid next state"`, - }, - { - Desp: "transitionusedforcompensation", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[3].BaseState.UsedForCompensation = false - return *model - }, - Err: `Key: 'Workflow.States[2].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transitionusedforcompensation' tag -Key: 'Workflow.States[3].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - { - Desp: "transtionmainworkflow", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.States[0].BaseState.Transition.NextState = model.States[3].BaseState.Name - return *model - }, - Err: `Key: 'Workflow.States[0].BaseState.Transition.NextState' Error:Field validation for 'NextState' failed on the 'transtionmainworkflow' tag`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestDataInputSchemaStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - sampleSchema := FromString("sample schema") - - testCases := []ValidationCase{ - { - Desp: "empty DataInputSchema", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{} - return *model - }, - Err: `workflow.dataInputSchema.schema is required`, - }, - { - Desp: "filled Schema, default failOnValidationErrors", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.DataInputSchema = &DataInputSchema{ - Schema: &sampleSchema, - } - return *model - }, - }, - } - - //fmt.Printf("%+v", testCases[0].Model) - StructLevelValidationCtx(t, testCases) -} - -func TestSecretsStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - testCases := []ValidationCase{ - { - Desp: "workflow secrets.name repeat", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() - model.Secrets = []string{"secret 1", "secret 1"} - return *model - }, - Err: `workflow.secrets has duplicate value`, - }, - } - - StructLevelValidationCtx(t, testCases) -} - -func TestErrorStructLevelValidation(t *testing.T) { - baseWorkflow := buildWorkflow() - - operationState := buildOperationState(baseWorkflow, "start state") - buildEndByState(operationState, true, false) - action1 := buildActionByOperationState(operationState, "action 1") - buildFunctionRef(baseWorkflow, action1, "function 1") - - baseWorkflow.BaseWorkflow.Errors = Errors{{ - Name: "error 1", - }, { - Name: "error 2", - }} - - testCases := []ValidationCase{ - { - Desp: "success", - Model: func() Workflow { - return *baseWorkflow.DeepCopy() - }, - }, - { - Desp: "required", - Model: func() Workflow { - model := baseWorkflow.DeepCopy() 
-				model.Errors[0].Name = ""
-				return *model
-			},
-			Err: `workflow.errors[0].name is required`,
-		},
-		{
-			Desp: "repeat",
-			Model: func() Workflow {
-				model := baseWorkflow.DeepCopy()
-				model.Errors = Errors{model.Errors[0], model.Errors[0]}
-				return *model
-			},
-			Err: `workflow.errors has duplicate "name"`,
-		},
-	}
-
-	StructLevelValidationCtx(t, testCases)
-}
-
-type ValidationCase struct {
-	Desp  string
-	Model func() Workflow
-	Err   string
-}
-
-func StructLevelValidationCtx(t *testing.T, testCases []ValidationCase) {
-	for _, tc := range testCases {
-		t.Run(tc.Desp, func(t *testing.T) {
-			model := tc.Model()
-			err := val.GetValidator().StructCtx(NewValidatorContext(&model), model)
-			err = val.WorkflowError(err)
-			if tc.Err != "" {
-				if assert.Error(t, err) {
-					assert.Equal(t, tc.Err, err.Error())
-				}
-			} else {
-				assert.NoError(t, err)
-			}
-		})
-	}
-}
diff --git a/model/zz_generated.buildergen.go b/model/zz_generated.buildergen.go
deleted file mode 100644
index 42564fe..0000000
--- a/model/zz_generated.buildergen.go
+++ /dev/null
@@ -1,3139 +0,0 @@
-//go:build !ignore_autogenerated
-// +build !ignore_autogenerated
-
-// Copyright 2023 The Serverless Workflow Specification Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Code generated by builder-gen. DO NOT EDIT.
-
-package model
-
-import (
-	floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr"
-	intstr "k8s.io/apimachinery/pkg/util/intstr"
-)
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func NewActionBuilder() *ActionBuilder { - builder := &ActionBuilder{} - builder.model = Action{} - builder.model.ApplyDefault() - builder.actiondatafilter = NewActionDataFilterBuilder() - return builder -} - -type ActionBuilder struct { - model Action - functionref *FunctionRefBuilder - eventref *EventRefBuilder - subflowref *WorkflowRefBuilder - sleep *SleepBuilder - actiondatafilter *ActionDataFilterBuilder -} - -func (b *ActionBuilder) ID(input string) *ActionBuilder { - b.model.ID = input - return b -} - -func (b *ActionBuilder) Name(input string) *ActionBuilder { - b.model.Name = input - return b -} - -func (b *ActionBuilder) FunctionRef() *FunctionRefBuilder { - if b.functionref == nil { - b.functionref = NewFunctionRefBuilder() - } - return b.functionref -} - -func (b *ActionBuilder) EventRef() *EventRefBuilder { - if b.eventref == nil { - b.eventref = NewEventRefBuilder() - } - return b.eventref -} - -func (b *ActionBuilder) SubFlowRef() *WorkflowRefBuilder { - if b.subflowref == nil { - b.subflowref = NewWorkflowRefBuilder() - } - return b.subflowref -} - -func (b *ActionBuilder) Sleep() *SleepBuilder { - if b.sleep == nil { - b.sleep = NewSleepBuilder() - } - return b.sleep -} - -func (b *ActionBuilder) RetryRef(input string) *ActionBuilder { - b.model.RetryRef = input - return b -} - -func (b *ActionBuilder) NonRetryableErrors(input []string) *ActionBuilder { - b.model.NonRetryableErrors = input - return b -} - -func (b *ActionBuilder) RetryableErrors(input []string) *ActionBuilder { - b.model.RetryableErrors = input - return b -} - -func (b *ActionBuilder) ActionDataFilter() *ActionDataFilterBuilder { - return b.actiondatafilter -} - -func (b *ActionBuilder) Condition(input string) *ActionBuilder { - b.model.Condition = input - return b -} - -func (b *ActionBuilder) Build() Action { - if b.functionref != nil { - functionref := b.functionref.Build() - b.model.FunctionRef = &functionref - } - if b.eventref != nil { - eventref := b.eventref.Build() - b.model.EventRef = &eventref - } - if b.subflowref != nil { - subflowref := b.subflowref.Build() - b.model.SubFlowRef = &subflowref - } - if b.sleep != nil { - sleep := b.sleep.Build() - b.model.Sleep = &sleep - } - b.model.ActionDataFilter = b.actiondatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewActionDataFilterBuilder() *ActionDataFilterBuilder { - builder := &ActionDataFilterBuilder{} - builder.model = ActionDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type ActionDataFilterBuilder struct { - model ActionDataFilter -} - -func (b *ActionDataFilterBuilder) FromStateData(input string) *ActionDataFilterBuilder { - b.model.FromStateData = input - return b -} - -func (b *ActionDataFilterBuilder) UseResults(input bool) *ActionDataFilterBuilder { - b.model.UseResults = input - return b -} - -func (b *ActionDataFilterBuilder) Results(input string) *ActionDataFilterBuilder { - b.model.Results = input - return b -} - -func (b *ActionDataFilterBuilder) ToStateData(input string) *ActionDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *ActionDataFilterBuilder) Build() ActionDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewAuthBuilder() *AuthBuilder { - builder := &AuthBuilder{} - builder.model = Auth{} - builder.properties = NewAuthPropertiesBuilder() - return builder -} - -type AuthBuilder struct { - model Auth - properties *AuthPropertiesBuilder -} - -func (b *AuthBuilder) Name(input string) *AuthBuilder { - b.model.Name = input - return b -} - -func (b *AuthBuilder) Scheme(input AuthType) *AuthBuilder { - b.model.Scheme = input - return b -} - -func (b *AuthBuilder) Properties() *AuthPropertiesBuilder { - return b.properties -} - -func (b *AuthBuilder) Build() Auth { - b.model.Properties = b.properties.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthPropertiesBuilder() *AuthPropertiesBuilder { - builder := &AuthPropertiesBuilder{} - builder.model = AuthProperties{} - return builder -} - -type AuthPropertiesBuilder struct { - model AuthProperties - basic *BasicAuthPropertiesBuilder - bearer *BearerAuthPropertiesBuilder - oauth2 *OAuth2AuthPropertiesBuilder -} - -func (b *AuthPropertiesBuilder) Basic() *BasicAuthPropertiesBuilder { - if b.basic == nil { - b.basic = NewBasicAuthPropertiesBuilder() - } - return b.basic -} - -func (b *AuthPropertiesBuilder) Bearer() *BearerAuthPropertiesBuilder { - if b.bearer == nil { - b.bearer = NewBearerAuthPropertiesBuilder() - } - return b.bearer -} - -func (b *AuthPropertiesBuilder) OAuth2() *OAuth2AuthPropertiesBuilder { - if b.oauth2 == nil { - b.oauth2 = NewOAuth2AuthPropertiesBuilder() - } - return b.oauth2 -} - -func (b *AuthPropertiesBuilder) Build() AuthProperties { - if b.basic != nil { - basic := b.basic.Build() - b.model.Basic = &basic - } - if b.bearer != nil { - bearer := b.bearer.Build() - b.model.Bearer = &bearer - } - if b.oauth2 != nil { - oauth2 := b.oauth2.Build() - b.model.OAuth2 = &oauth2 - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewAuthsBuilder() *AuthsBuilder { - builder := &AuthsBuilder{} - builder.model = Auths{} - return builder -} - -type AuthsBuilder struct { - model Auths -} - -func (b *AuthsBuilder) Build() Auths { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBaseStateBuilder() *BaseStateBuilder { - builder := &BaseStateBuilder{} - builder.model = BaseState{} - builder.onerrors = []*OnErrorBuilder{} - return builder -} - -type BaseStateBuilder struct { - model BaseState - onerrors []*OnErrorBuilder - transition *TransitionBuilder - statedatafilter *StateDataFilterBuilder - end *EndBuilder -} - -func (b *BaseStateBuilder) ID(input string) *BaseStateBuilder { - b.model.ID = input - return b -} - -func (b *BaseStateBuilder) Name(input string) *BaseStateBuilder { - b.model.Name = input - return b -} - -func (b *BaseStateBuilder) Type(input StateType) *BaseStateBuilder { - b.model.Type = input - return b -} - -func (b *BaseStateBuilder) AddOnErrors() *OnErrorBuilder { - builder := NewOnErrorBuilder() - b.onerrors = append(b.onerrors, builder) - return builder -} - -func (b *BaseStateBuilder) RemoveOnErrors(remove *OnErrorBuilder) { - for i, val := range b.onerrors { - if val == remove { - b.onerrors[i] = b.onerrors[len(b.onerrors)-1] - b.onerrors = b.onerrors[:len(b.onerrors)-1] - } - } -} -func (b *BaseStateBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *BaseStateBuilder) StateDataFilter() *StateDataFilterBuilder { - if b.statedatafilter == nil { - b.statedatafilter = NewStateDataFilterBuilder() - } - return b.statedatafilter -} - -func (b *BaseStateBuilder) CompensatedBy(input string) *BaseStateBuilder { - b.model.CompensatedBy = input - return b -} - -func (b *BaseStateBuilder) UsedForCompensation(input bool) *BaseStateBuilder { - b.model.UsedForCompensation = input - return b -} - -func (b *BaseStateBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *BaseStateBuilder) Build() BaseState { - b.model.OnErrors = []OnError{} - for _, v := range b.onerrors { - b.model.OnErrors = append(b.model.OnErrors, v.Build()) - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.statedatafilter != nil { - statedatafilter := b.statedatafilter.Build() - b.model.StateDataFilter = &statedatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBaseWorkflowBuilder() *BaseWorkflowBuilder { - builder := &BaseWorkflowBuilder{} - builder.model = BaseWorkflow{} - builder.model.ApplyDefault() - builder.errors = []*ErrorBuilder{} - builder.auth = []*AuthBuilder{} - return builder -} - -type BaseWorkflowBuilder struct { - model BaseWorkflow - start *StartBuilder - datainputschema *DataInputSchemaBuilder - constants *ConstantsBuilder - timeouts *TimeoutsBuilder - errors []*ErrorBuilder - auth []*AuthBuilder -} - -func (b *BaseWorkflowBuilder) ID(input string) *BaseWorkflowBuilder { - b.model.ID = input - return b -} - -func (b *BaseWorkflowBuilder) Key(input string) *BaseWorkflowBuilder { - b.model.Key = input - return b -} - -func (b *BaseWorkflowBuilder) Name(input string) *BaseWorkflowBuilder { - b.model.Name = input - return b -} - -func (b *BaseWorkflowBuilder) Description(input string) *BaseWorkflowBuilder { - b.model.Description = input - return b -} - -func (b *BaseWorkflowBuilder) Version(input string) *BaseWorkflowBuilder { - b.model.Version = input - return b -} - -func (b *BaseWorkflowBuilder) Start() *StartBuilder { - if b.start == nil { - b.start = NewStartBuilder() - } - return b.start -} - -func (b *BaseWorkflowBuilder) Annotations(input []string) *BaseWorkflowBuilder { - b.model.Annotations = input - return b -} - -func (b *BaseWorkflowBuilder) DataInputSchema() *DataInputSchemaBuilder { - if b.datainputschema == nil { - b.datainputschema = NewDataInputSchemaBuilder() - } - return b.datainputschema -} - -func (b *BaseWorkflowBuilder) SpecVersion(input string) *BaseWorkflowBuilder { - b.model.SpecVersion = input - return b -} - -func (b *BaseWorkflowBuilder) Secrets(input Secrets) *BaseWorkflowBuilder { - b.model.Secrets = input - return b -} - -func (b *BaseWorkflowBuilder) Constants() *ConstantsBuilder { - if b.constants == nil { - b.constants = NewConstantsBuilder() - } - return b.constants -} - -func (b *BaseWorkflowBuilder) ExpressionLang(input ExpressionLangType) *BaseWorkflowBuilder { - b.model.ExpressionLang = input - return b -} - -func (b *BaseWorkflowBuilder) Timeouts() *TimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BaseWorkflowBuilder) AddErrors() *ErrorBuilder { - builder := NewErrorBuilder() - b.errors = append(b.errors, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveErrors(remove *ErrorBuilder) { - for i, val := range b.errors { - if val == remove { - b.errors[i] = b.errors[len(b.errors)-1] - b.errors = b.errors[:len(b.errors)-1] - } - } -} -func (b *BaseWorkflowBuilder) KeepActive(input bool) *BaseWorkflowBuilder { - b.model.KeepActive = input - return b -} - -func (b *BaseWorkflowBuilder) Metadata(input Metadata) *BaseWorkflowBuilder { - b.model.Metadata = input - return b -} - -func (b *BaseWorkflowBuilder) AutoRetries(input bool) *BaseWorkflowBuilder { - b.model.AutoRetries = input - return b -} - -func (b *BaseWorkflowBuilder) AddAuth() *AuthBuilder { - builder := NewAuthBuilder() - b.auth = append(b.auth, builder) - return builder -} - -func (b *BaseWorkflowBuilder) RemoveAuth(remove *AuthBuilder) { - for i, val := range b.auth { - if val == remove { - b.auth[i] = b.auth[len(b.auth)-1] - b.auth = b.auth[:len(b.auth)-1] - } - } -} -func (b *BaseWorkflowBuilder) Build() BaseWorkflow { - if b.start != nil { - start := b.start.Build() - b.model.Start = &start - } - if b.datainputschema != nil { - datainputschema := b.datainputschema.Build() - b.model.DataInputSchema = &datainputschema - } - if b.constants 
!= nil { - constants := b.constants.Build() - b.model.Constants = &constants - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - b.model.Errors = []Error{} - for _, v := range b.errors { - b.model.Errors = append(b.model.Errors, v.Build()) - } - b.model.Auth = []Auth{} - for _, v := range b.auth { - b.model.Auth = append(b.model.Auth, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBasicAuthPropertiesBuilder() *BasicAuthPropertiesBuilder { - builder := &BasicAuthPropertiesBuilder{} - builder.model = BasicAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BasicAuthPropertiesBuilder struct { - model BasicAuthProperties - CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BasicAuthPropertiesBuilder) Secret(input string) *BasicAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Username(input string) *BasicAuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Password(input string) *BasicAuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *BasicAuthPropertiesBuilder) Build() BasicAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBearerAuthPropertiesBuilder() *BearerAuthPropertiesBuilder { - builder := &BearerAuthPropertiesBuilder{} - builder.model = BearerAuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type BearerAuthPropertiesBuilder struct { - model BearerAuthProperties - CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *BearerAuthPropertiesBuilder) Secret(input string) *BearerAuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Token(input string) *BearerAuthPropertiesBuilder { - b.model.Token = input - return b -} - -func (b *BearerAuthPropertiesBuilder) Build() BearerAuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewBranchBuilder() *BranchBuilder { - builder := &BranchBuilder{} - builder.model = Branch{} - builder.actions = []*ActionBuilder{} - return builder -} - -type BranchBuilder struct { - model Branch - actions []*ActionBuilder - timeouts *BranchTimeoutsBuilder -} - -func (b *BranchBuilder) Name(input string) *BranchBuilder { - b.model.Name = input - return b -} - -func (b *BranchBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *BranchBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *BranchBuilder) Timeouts() *BranchTimeoutsBuilder { - if b.timeouts == nil { - b.timeouts = NewBranchTimeoutsBuilder() - } - return b.timeouts -} - -func (b *BranchBuilder) Build() Branch { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewBranchTimeoutsBuilder() *BranchTimeoutsBuilder { - builder := &BranchTimeoutsBuilder{} - builder.model = BranchTimeouts{} - return builder -} - -type BranchTimeoutsBuilder struct { - model BranchTimeouts -} - -func (b *BranchTimeoutsBuilder) ActionExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) BranchExecTimeout(input string) *BranchTimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *BranchTimeoutsBuilder) Build() BranchTimeouts { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCallbackStateBuilder() *CallbackStateBuilder { - builder := &CallbackStateBuilder{} - builder.model = CallbackState{} - builder.action = NewActionBuilder() - return builder -} - -type CallbackStateBuilder struct { - model CallbackState - action *ActionBuilder - timeouts *CallbackStateTimeoutBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *CallbackStateBuilder) Action() *ActionBuilder { - return b.action -} - -func (b *CallbackStateBuilder) EventRef(input string) *CallbackStateBuilder { - b.model.EventRef = input - return b -} - -func (b *CallbackStateBuilder) Timeouts() *CallbackStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewCallbackStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *CallbackStateBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *CallbackStateBuilder) Build() CallbackState { - b.model.Action = b.action.Build() - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewCallbackStateTimeoutBuilder() *CallbackStateTimeoutBuilder { - builder := &CallbackStateTimeoutBuilder{} - builder.model = CallbackStateTimeout{} - return builder -} - -type CallbackStateTimeoutBuilder struct { - model CallbackStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *CallbackStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *CallbackStateTimeoutBuilder) ActionExecTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) EventTimeout(input string) *CallbackStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *CallbackStateTimeoutBuilder) Build() CallbackStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCommonBuilder() *CommonBuilder { - builder := &CommonBuilder{} - builder.model = Common{} - return builder -} - -type CommonBuilder struct { - model Common -} - -func (b *CommonBuilder) Metadata(input Metadata) *CommonBuilder { - b.model.Metadata = input - return b -} - -func (b *CommonBuilder) Build() Common { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsBuilder() *ConstantsBuilder { - builder := &ConstantsBuilder{} - builder.model = Constants{} - return builder -} - -type ConstantsBuilder struct { - model Constants -} - -func (b *ConstantsBuilder) Data(input ConstantsData) *ConstantsBuilder { - b.model.Data = input - return b -} - -func (b *ConstantsBuilder) Build() Constants { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewConstantsDataBuilder() *ConstantsDataBuilder { - builder := &ConstantsDataBuilder{} - builder.model = ConstantsData{} - return builder -} - -type ConstantsDataBuilder struct { - model ConstantsData -} - -func (b *ConstantsDataBuilder) Build() ConstantsData { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewContinueAsBuilder() *ContinueAsBuilder { - builder := &ContinueAsBuilder{} - builder.model = ContinueAs{} - builder.data = NewObjectBuilder() - builder.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - return builder -} - -type ContinueAsBuilder struct { - model ContinueAs - data *ObjectBuilder - workflowexectimeout *WorkflowExecTimeoutBuilder -} - -func (b *ContinueAsBuilder) WorkflowID(input string) *ContinueAsBuilder { - b.model.WorkflowID = input - return b -} - -func (b *ContinueAsBuilder) Version(input string) *ContinueAsBuilder { - b.model.Version = input - return b -} - -func (b *ContinueAsBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ContinueAsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - return b.workflowexectimeout -} - -func (b *ContinueAsBuilder) Build() ContinueAs { - b.model.Data = b.data.Build() - b.model.WorkflowExecTimeout = b.workflowexectimeout.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func NewCorrelationBuilder() *CorrelationBuilder { - builder := &CorrelationBuilder{} - builder.model = Correlation{} - return builder -} - -type CorrelationBuilder struct { - model Correlation -} - -func (b *CorrelationBuilder) ContextAttributeName(input string) *CorrelationBuilder { - b.model.ContextAttributeName = input - return b -} - -func (b *CorrelationBuilder) ContextAttributeValue(input string) *CorrelationBuilder { - b.model.ContextAttributeValue = input - return b -} - -func (b *CorrelationBuilder) Build() Correlation { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewCronBuilder() *CronBuilder { - builder := &CronBuilder{} - builder.model = Cron{} - return builder -} - -type CronBuilder struct { - model Cron -} - -func (b *CronBuilder) Expression(input string) *CronBuilder { - b.model.Expression = input - return b -} - -func (b *CronBuilder) ValidUntil(input string) *CronBuilder { - b.model.ValidUntil = input - return b -} - -func (b *CronBuilder) Build() Cron { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataConditionBuilder() *DataConditionBuilder { - builder := &DataConditionBuilder{} - builder.model = DataCondition{} - return builder -} - -type DataConditionBuilder struct { - model DataCondition - end *EndBuilder - transition *TransitionBuilder -} - -func (b *DataConditionBuilder) Name(input string) *DataConditionBuilder { - b.model.Name = input - return b -} - -func (b *DataConditionBuilder) Condition(input string) *DataConditionBuilder { - b.model.Condition = input - return b -} - -func (b *DataConditionBuilder) Metadata(input Metadata) *DataConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *DataConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DataConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DataConditionBuilder) Build() DataCondition { - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDataInputSchemaBuilder() *DataInputSchemaBuilder { - builder := &DataInputSchemaBuilder{} - builder.model = DataInputSchema{} - builder.model.ApplyDefault() - return builder -} - -type DataInputSchemaBuilder struct { - model DataInputSchema - schema *ObjectBuilder -} - -func (b *DataInputSchemaBuilder) Schema() *ObjectBuilder { - if b.schema == nil { - b.schema = NewObjectBuilder() - } - return b.schema -} - -func (b *DataInputSchemaBuilder) FailOnValidationErrors(input bool) *DataInputSchemaBuilder { - b.model.FailOnValidationErrors = input - return b -} - -func (b *DataInputSchemaBuilder) Build() DataInputSchema { - if b.schema != nil { - schema := b.schema.Build() - b.model.Schema = &schema - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewDefaultConditionBuilder() *DefaultConditionBuilder { - builder := &DefaultConditionBuilder{} - builder.model = DefaultCondition{} - return builder -} - -type DefaultConditionBuilder struct { - model DefaultCondition - transition *TransitionBuilder - end *EndBuilder -} - -func (b *DefaultConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *DefaultConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *DefaultConditionBuilder) Build() DefaultCondition { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewDelayStateBuilder() *DelayStateBuilder { - builder := &DelayStateBuilder{} - builder.model = DelayState{} - return builder -} - -type DelayStateBuilder struct { - model DelayState -} - -func (b *DelayStateBuilder) TimeDelay(input string) *DelayStateBuilder { - b.model.TimeDelay = input - return b -} - -func (b *DelayStateBuilder) Build() DelayState { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEndBuilder() *EndBuilder { - builder := &EndBuilder{} - builder.model = End{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type EndBuilder struct { - model End - produceevents []*ProduceEventBuilder - continueas *ContinueAsBuilder -} - -func (b *EndBuilder) Terminate(input bool) *EndBuilder { - b.model.Terminate = input - return b -} - -func (b *EndBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *EndBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *EndBuilder) Compensate(input bool) *EndBuilder { - b.model.Compensate = input - return b -} - -func (b *EndBuilder) ContinueAs() *ContinueAsBuilder { - if b.continueas == nil { - b.continueas = NewContinueAsBuilder() - } - return b.continueas -} - -func (b *EndBuilder) Build() End { - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - if b.continueas != nil { - continueas := b.continueas.Build() - b.model.ContinueAs = &continueas - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewErrorBuilder() *ErrorBuilder { - builder := &ErrorBuilder{} - builder.model = Error{} - return builder -} - -type ErrorBuilder struct { - model Error -} - -func (b *ErrorBuilder) Name(input string) *ErrorBuilder { - b.model.Name = input - return b -} - -func (b *ErrorBuilder) Code(input string) *ErrorBuilder { - b.model.Code = input - return b -} - -func (b *ErrorBuilder) Description(input string) *ErrorBuilder { - b.model.Description = input - return b -} - -func (b *ErrorBuilder) Build() Error { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewErrorsBuilder() *ErrorsBuilder { - builder := &ErrorsBuilder{} - builder.model = Errors{} - return builder -} - -type ErrorsBuilder struct { - model Errors -} - -func (b *ErrorsBuilder) Build() Errors { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventBuilder() *EventBuilder { - builder := &EventBuilder{} - builder.model = Event{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - builder.correlation = []*CorrelationBuilder{} - return builder -} - -type EventBuilder struct { - model Event - CommonBuilder - correlation []*CorrelationBuilder -} - -func (b *EventBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *EventBuilder) Name(input string) *EventBuilder { - b.model.Name = input - return b -} - -func (b *EventBuilder) Source(input string) *EventBuilder { - b.model.Source = input - return b -} - -func (b *EventBuilder) Type(input string) *EventBuilder { - b.model.Type = input - return b -} - -func (b *EventBuilder) Kind(input EventKind) *EventBuilder { - b.model.Kind = input - return b -} - -func (b *EventBuilder) DataOnly(input bool) *EventBuilder { - b.model.DataOnly = input - return b -} - -func (b *EventBuilder) AddCorrelation() *CorrelationBuilder { - builder := NewCorrelationBuilder() - b.correlation = append(b.correlation, builder) - return builder -} - -func (b *EventBuilder) RemoveCorrelation(remove *CorrelationBuilder) { - for i, val := range b.correlation { - if val == remove { - b.correlation[i] = b.correlation[len(b.correlation)-1] - b.correlation = b.correlation[:len(b.correlation)-1] - } - } -} -func (b *EventBuilder) Build() Event { - b.model.Common = b.CommonBuilder.Build() - b.model.Correlation = []Correlation{} - for _, v := range b.correlation { - b.model.Correlation = append(b.model.Correlation, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventConditionBuilder() *EventConditionBuilder { - builder := &EventConditionBuilder{} - builder.model = EventCondition{} - return builder -} - -type EventConditionBuilder struct { - model EventCondition - eventdatafilter *EventDataFilterBuilder - end *EndBuilder - transition *TransitionBuilder -} - -func (b *EventConditionBuilder) Name(input string) *EventConditionBuilder { - b.model.Name = input - return b -} - -func (b *EventConditionBuilder) EventRef(input string) *EventConditionBuilder { - b.model.EventRef = input - return b -} - -func (b *EventConditionBuilder) EventDataFilter() *EventDataFilterBuilder { - if b.eventdatafilter == nil { - b.eventdatafilter = NewEventDataFilterBuilder() - } - return b.eventdatafilter -} - -func (b *EventConditionBuilder) Metadata(input Metadata) *EventConditionBuilder { - b.model.Metadata = input - return b -} - -func (b *EventConditionBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *EventConditionBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *EventConditionBuilder) Build() EventCondition { - if b.eventdatafilter != nil { - eventdatafilter := b.eventdatafilter.Build() - b.model.EventDataFilter = &eventdatafilter - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventConditionsBuilder() *EventConditionsBuilder { - builder := &EventConditionsBuilder{} - builder.model = EventConditions{} - return builder -} - -type EventConditionsBuilder struct { - model EventConditions -} - -func (b *EventConditionsBuilder) Build() EventConditions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventDataFilterBuilder() *EventDataFilterBuilder { - builder := &EventDataFilterBuilder{} - builder.model = EventDataFilter{} - builder.model.ApplyDefault() - return builder -} - -type EventDataFilterBuilder struct { - model EventDataFilter -} - -func (b *EventDataFilterBuilder) UseData(input bool) *EventDataFilterBuilder { - b.model.UseData = input - return b -} - -func (b *EventDataFilterBuilder) Data(input string) *EventDataFilterBuilder { - b.model.Data = input - return b -} - -func (b *EventDataFilterBuilder) ToStateData(input string) *EventDataFilterBuilder { - b.model.ToStateData = input - return b -} - -func (b *EventDataFilterBuilder) Build() EventDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventRefBuilder() *EventRefBuilder { - builder := &EventRefBuilder{} - builder.model = EventRef{} - builder.model.ApplyDefault() - return builder -} - -type EventRefBuilder struct { - model EventRef - data *ObjectBuilder -} - -func (b *EventRefBuilder) TriggerEventRef(input string) *EventRefBuilder { - b.model.TriggerEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventRef(input string) *EventRefBuilder { - b.model.ResultEventRef = input - return b -} - -func (b *EventRefBuilder) ResultEventTimeout(input string) *EventRefBuilder { - b.model.ResultEventTimeout = input - return b -} - -func (b *EventRefBuilder) Data() *ObjectBuilder { - if b.data == nil { - b.data = NewObjectBuilder() - } - return b.data -} - -func (b *EventRefBuilder) ContextAttributes(input map[string]Object) *EventRefBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *EventRefBuilder) Invoke(input InvokeKind) *EventRefBuilder { - b.model.Invoke = input - return b -} - -func (b *EventRefBuilder) Build() EventRef { - if b.data != nil { - data := b.data.Build() - b.model.Data = &data - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventStateBuilder() *EventStateBuilder { - builder := &EventStateBuilder{} - builder.model = EventState{} - builder.model.ApplyDefault() - builder.onevents = []*OnEventsBuilder{} - return builder -} - -type EventStateBuilder struct { - model EventState - onevents []*OnEventsBuilder - timeouts *EventStateTimeoutBuilder -} - -func (b *EventStateBuilder) Exclusive(input bool) *EventStateBuilder { - b.model.Exclusive = input - return b -} - -func (b *EventStateBuilder) AddOnEvents() *OnEventsBuilder { - builder := NewOnEventsBuilder() - b.onevents = append(b.onevents, builder) - return builder -} - -func (b *EventStateBuilder) RemoveOnEvents(remove *OnEventsBuilder) { - for i, val := range b.onevents { - if val == remove { - b.onevents[i] = b.onevents[len(b.onevents)-1] - b.onevents = b.onevents[:len(b.onevents)-1] - } - } -} -func (b *EventStateBuilder) Timeouts() *EventStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewEventStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *EventStateBuilder) Build() EventState { - b.model.OnEvents = []OnEvents{} - for _, v := range b.onevents { - b.model.OnEvents = append(b.model.OnEvents, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewEventStateTimeoutBuilder() *EventStateTimeoutBuilder { - builder := &EventStateTimeoutBuilder{} - builder.model = EventStateTimeout{} - return builder -} - -type EventStateTimeoutBuilder struct { - model EventStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *EventStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *EventStateTimeoutBuilder) ActionExecTimeout(input string) *EventStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) EventTimeout(input string) *EventStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *EventStateTimeoutBuilder) Build() EventStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewEventsBuilder() *EventsBuilder { - builder := &EventsBuilder{} - builder.model = Events{} - return builder -} - -type EventsBuilder struct { - model Events -} - -func (b *EventsBuilder) Build() Events { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewForEachStateBuilder() *ForEachStateBuilder { - builder := &ForEachStateBuilder{} - builder.model = ForEachState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type ForEachStateBuilder struct { - model ForEachState - actions []*ActionBuilder - timeouts *ForEachStateTimeoutBuilder -} - -func (b *ForEachStateBuilder) InputCollection(input string) *ForEachStateBuilder { - b.model.InputCollection = input - return b -} - -func (b *ForEachStateBuilder) OutputCollection(input string) *ForEachStateBuilder { - b.model.OutputCollection = input - return b -} - -func (b *ForEachStateBuilder) IterationParam(input string) *ForEachStateBuilder { - b.model.IterationParam = input - return b -} - -func (b *ForEachStateBuilder) BatchSize(input *intstr.IntOrString) *ForEachStateBuilder { - b.model.BatchSize = input - return b -} - -func (b *ForEachStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *ForEachStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *ForEachStateBuilder) Timeouts() *ForEachStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewForEachStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ForEachStateBuilder) Mode(input ForEachModeType) *ForEachStateBuilder { - b.model.Mode = input - return b -} - -func (b *ForEachStateBuilder) Build() ForEachState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewForEachStateTimeoutBuilder() *ForEachStateTimeoutBuilder { - builder := &ForEachStateTimeoutBuilder{} - builder.model = ForEachStateTimeout{} - return builder -} - -type ForEachStateTimeoutBuilder struct { - model ForEachStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ForEachStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ForEachStateTimeoutBuilder) ActionExecTimeout(input string) *ForEachStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *ForEachStateTimeoutBuilder) Build() ForEachStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionBuilder() *FunctionBuilder { - builder := &FunctionBuilder{} - builder.model = Function{} - builder.model.ApplyDefault() - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type FunctionBuilder struct { - model Function - CommonBuilder -} - -func (b *FunctionBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *FunctionBuilder) Name(input string) *FunctionBuilder { - b.model.Name = input - return b -} - -func (b *FunctionBuilder) Operation(input string) *FunctionBuilder { - b.model.Operation = input - return b -} - -func (b *FunctionBuilder) Type(input FunctionType) *FunctionBuilder { - b.model.Type = input - return b -} - -func (b *FunctionBuilder) AuthRef(input string) *FunctionBuilder { - b.model.AuthRef = input - return b -} - -func (b *FunctionBuilder) Build() Function { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionRefBuilder() *FunctionRefBuilder { - builder := &FunctionRefBuilder{} - builder.model = FunctionRef{} - builder.model.ApplyDefault() - return builder -} - -type FunctionRefBuilder struct { - model FunctionRef -} - -func (b *FunctionRefBuilder) RefName(input string) *FunctionRefBuilder { - b.model.RefName = input - return b -} - -func (b *FunctionRefBuilder) Arguments(input map[string]Object) *FunctionRefBuilder { - b.model.Arguments = input - return b -} - -func (b *FunctionRefBuilder) SelectionSet(input string) *FunctionRefBuilder { - b.model.SelectionSet = input - return b -} - -func (b *FunctionRefBuilder) Invoke(input InvokeKind) *FunctionRefBuilder { - b.model.Invoke = input - return b -} - -func (b *FunctionRefBuilder) Build() FunctionRef { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewFunctionsBuilder() *FunctionsBuilder { - builder := &FunctionsBuilder{} - builder.model = Functions{} - return builder -} - -type FunctionsBuilder struct { - model Functions -} - -func (b *FunctionsBuilder) Build() Functions { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewInjectStateBuilder() *InjectStateBuilder { - builder := &InjectStateBuilder{} - builder.model = InjectState{} - return builder -} - -type InjectStateBuilder struct { - model InjectState - timeouts *InjectStateTimeoutBuilder -} - -func (b *InjectStateBuilder) Data(input map[string]Object) *InjectStateBuilder { - b.model.Data = input - return b -} - -func (b *InjectStateBuilder) Timeouts() *InjectStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewInjectStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *InjectStateBuilder) Build() InjectState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewInjectStateTimeoutBuilder() *InjectStateTimeoutBuilder { - builder := &InjectStateTimeoutBuilder{} - builder.model = InjectStateTimeout{} - return builder -} - -type InjectStateTimeoutBuilder struct { - model InjectStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *InjectStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *InjectStateTimeoutBuilder) Build() InjectStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewMetadataBuilder() *MetadataBuilder { - builder := &MetadataBuilder{} - builder.model = Metadata{} - return builder -} - -type MetadataBuilder struct { - model Metadata -} - -func (b *MetadataBuilder) Build() Metadata { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOAuth2AuthPropertiesBuilder() *OAuth2AuthPropertiesBuilder { - builder := &OAuth2AuthPropertiesBuilder{} - builder.model = OAuth2AuthProperties{} - builder.CommonBuilder = *NewCommonBuilder() - return builder -} - -type OAuth2AuthPropertiesBuilder struct { - model OAuth2AuthProperties - CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Common() *CommonBuilder { - return &b.CommonBuilder -} - -func (b *OAuth2AuthPropertiesBuilder) Secret(input string) *OAuth2AuthPropertiesBuilder { - b.model.Secret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Authority(input string) *OAuth2AuthPropertiesBuilder { - b.model.Authority = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) GrantType(input GrantType) *OAuth2AuthPropertiesBuilder { - b.model.GrantType = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientID(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientID = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) ClientSecret(input string) *OAuth2AuthPropertiesBuilder { - b.model.ClientSecret = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Scopes(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Scopes = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Username(input string) *OAuth2AuthPropertiesBuilder { - b.model.Username = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Password(input string) *OAuth2AuthPropertiesBuilder { - b.model.Password = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Audiences(input []string) *OAuth2AuthPropertiesBuilder { - b.model.Audiences = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) SubjectToken(input string) *OAuth2AuthPropertiesBuilder { - b.model.SubjectToken = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedSubject(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedSubject = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) RequestedIssuer(input string) *OAuth2AuthPropertiesBuilder { - b.model.RequestedIssuer = input - return b -} - -func (b *OAuth2AuthPropertiesBuilder) Build() OAuth2AuthProperties { - b.model.Common = b.CommonBuilder.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewObjectBuilder() *ObjectBuilder { - builder := &ObjectBuilder{} - builder.model = Object{} - builder.slicevalue = []*ObjectBuilder{} - return builder -} - -type ObjectBuilder struct { - model Object - slicevalue []*ObjectBuilder -} - -func (b *ObjectBuilder) Type(input Type) *ObjectBuilder { - b.model.Type = input - return b -} - -func (b *ObjectBuilder) StringValue(input string) *ObjectBuilder { - b.model.StringValue = input - return b -} - -func (b *ObjectBuilder) IntValue(input int32) *ObjectBuilder { - b.model.IntValue = input - return b -} - -func (b *ObjectBuilder) FloatValue(input float64) *ObjectBuilder { - b.model.FloatValue = input - return b -} - -func (b *ObjectBuilder) MapValue(input map[string]Object) *ObjectBuilder { - b.model.MapValue = input - return b -} - -func (b *ObjectBuilder) AddSliceValue() *ObjectBuilder { - builder := NewObjectBuilder() - b.slicevalue = append(b.slicevalue, builder) - return builder -} - -func (b *ObjectBuilder) RemoveSliceValue(remove *ObjectBuilder) { - for i, val := range b.slicevalue { - if val == remove { - b.slicevalue[i] = b.slicevalue[len(b.slicevalue)-1] - b.slicevalue = b.slicevalue[:len(b.slicevalue)-1] - } - } -} -func (b *ObjectBuilder) BoolValue(input bool) *ObjectBuilder { - b.model.BoolValue = input - return b -} - -func (b *ObjectBuilder) Build() Object { - b.model.SliceValue = []Object{} - for _, v := range b.slicevalue { - b.model.SliceValue = append(b.model.SliceValue, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOnErrorBuilder() *OnErrorBuilder { - builder := &OnErrorBuilder{} - builder.model = OnError{} - return builder -} - -type OnErrorBuilder struct { - model OnError - transition *TransitionBuilder - end *EndBuilder -} - -func (b *OnErrorBuilder) ErrorRef(input string) *OnErrorBuilder { - b.model.ErrorRef = input - return b -} - -func (b *OnErrorBuilder) ErrorRefs(input []string) *OnErrorBuilder { - b.model.ErrorRefs = input - return b -} - -func (b *OnErrorBuilder) Transition() *TransitionBuilder { - if b.transition == nil { - b.transition = NewTransitionBuilder() - } - return b.transition -} - -func (b *OnErrorBuilder) End() *EndBuilder { - if b.end == nil { - b.end = NewEndBuilder() - } - return b.end -} - -func (b *OnErrorBuilder) Build() OnError { - if b.transition != nil { - transition := b.transition.Build() - b.model.Transition = &transition - } - if b.end != nil { - end := b.end.Build() - b.model.End = &end - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOnEventsBuilder() *OnEventsBuilder { - builder := &OnEventsBuilder{} - builder.model = OnEvents{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - builder.eventdatafilter = NewEventDataFilterBuilder() - return builder -} - -type OnEventsBuilder struct { - model OnEvents - actions []*ActionBuilder - eventdatafilter *EventDataFilterBuilder -} - -func (b *OnEventsBuilder) EventRefs(input []string) *OnEventsBuilder { - b.model.EventRefs = input - return b -} - -func (b *OnEventsBuilder) ActionMode(input ActionMode) *OnEventsBuilder { - b.model.ActionMode = input - return b -} - -func (b *OnEventsBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OnEventsBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OnEventsBuilder) EventDataFilter() *EventDataFilterBuilder { - return b.eventdatafilter -} - -func (b *OnEventsBuilder) Build() OnEvents { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - b.model.EventDataFilter = b.eventdatafilter.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewOperationStateBuilder() *OperationStateBuilder { - builder := &OperationStateBuilder{} - builder.model = OperationState{} - builder.model.ApplyDefault() - builder.actions = []*ActionBuilder{} - return builder -} - -type OperationStateBuilder struct { - model OperationState - actions []*ActionBuilder - timeouts *OperationStateTimeoutBuilder -} - -func (b *OperationStateBuilder) ActionMode(input ActionMode) *OperationStateBuilder { - b.model.ActionMode = input - return b -} - -func (b *OperationStateBuilder) AddActions() *ActionBuilder { - builder := NewActionBuilder() - b.actions = append(b.actions, builder) - return builder -} - -func (b *OperationStateBuilder) RemoveActions(remove *ActionBuilder) { - for i, val := range b.actions { - if val == remove { - b.actions[i] = b.actions[len(b.actions)-1] - b.actions = b.actions[:len(b.actions)-1] - } - } -} -func (b *OperationStateBuilder) Timeouts() *OperationStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewOperationStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *OperationStateBuilder) Build() OperationState { - b.model.Actions = []Action{} - for _, v := range b.actions { - b.model.Actions = append(b.model.Actions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewOperationStateTimeoutBuilder() *OperationStateTimeoutBuilder { - builder := &OperationStateTimeoutBuilder{} - builder.model = OperationStateTimeout{} - return builder -} - -type OperationStateTimeoutBuilder struct { - model OperationStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *OperationStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *OperationStateTimeoutBuilder) ActionExecTimeout(input string) *OperationStateTimeoutBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *OperationStateTimeoutBuilder) Build() OperationStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewParallelStateBuilder() *ParallelStateBuilder { - builder := &ParallelStateBuilder{} - builder.model = ParallelState{} - builder.model.ApplyDefault() - builder.branches = []*BranchBuilder{} - return builder -} - -type ParallelStateBuilder struct { - model ParallelState - branches []*BranchBuilder - timeouts *ParallelStateTimeoutBuilder -} - -func (b *ParallelStateBuilder) AddBranches() *BranchBuilder { - builder := NewBranchBuilder() - b.branches = append(b.branches, builder) - return builder -} - -func (b *ParallelStateBuilder) RemoveBranches(remove *BranchBuilder) { - for i, val := range b.branches { - if val == remove { - b.branches[i] = b.branches[len(b.branches)-1] - b.branches = b.branches[:len(b.branches)-1] - } - } -} -func (b *ParallelStateBuilder) CompletionType(input CompletionType) *ParallelStateBuilder { - b.model.CompletionType = input - return b -} - -func (b *ParallelStateBuilder) NumCompleted(input intstr.IntOrString) *ParallelStateBuilder { - b.model.NumCompleted = input - return b -} - -func (b *ParallelStateBuilder) Timeouts() *ParallelStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewParallelStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *ParallelStateBuilder) Build() ParallelState { - b.model.Branches = []Branch{} - for _, v := range b.branches { - b.model.Branches = append(b.model.Branches, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewParallelStateTimeoutBuilder() *ParallelStateTimeoutBuilder { - builder := &ParallelStateTimeoutBuilder{} - builder.model = ParallelStateTimeout{} - return builder -} - -type ParallelStateTimeoutBuilder struct { - model ParallelStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *ParallelStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *ParallelStateTimeoutBuilder) BranchExecTimeout(input string) *ParallelStateTimeoutBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *ParallelStateTimeoutBuilder) Build() ParallelStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewProduceEventBuilder() *ProduceEventBuilder { - builder := &ProduceEventBuilder{} - builder.model = ProduceEvent{} - builder.data = NewObjectBuilder() - return builder -} - -type ProduceEventBuilder struct { - model ProduceEvent - data *ObjectBuilder -} - -func (b *ProduceEventBuilder) EventRef(input string) *ProduceEventBuilder { - b.model.EventRef = input - return b -} - -func (b *ProduceEventBuilder) Data() *ObjectBuilder { - return b.data -} - -func (b *ProduceEventBuilder) ContextAttributes(input map[string]string) *ProduceEventBuilder { - b.model.ContextAttributes = input - return b -} - -func (b *ProduceEventBuilder) Build() ProduceEvent { - b.model.Data = b.data.Build() - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetriesBuilder() *RetriesBuilder { - builder := &RetriesBuilder{} - builder.model = Retries{} - return builder -} - -type RetriesBuilder struct { - model Retries -} - -func (b *RetriesBuilder) Build() Retries { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewRetryBuilder() *RetryBuilder { - builder := &RetryBuilder{} - builder.model = Retry{} - builder.model.ApplyDefault() - return builder -} - -type RetryBuilder struct { - model Retry -} - -func (b *RetryBuilder) Name(input string) *RetryBuilder { - b.model.Name = input - return b -} - -func (b *RetryBuilder) Delay(input string) *RetryBuilder { - b.model.Delay = input - return b -} - -func (b *RetryBuilder) MaxDelay(input string) *RetryBuilder { - b.model.MaxDelay = input - return b -} - -func (b *RetryBuilder) Increment(input string) *RetryBuilder { - b.model.Increment = input - return b -} - -func (b *RetryBuilder) Multiplier(input *floatstr.Float32OrString) *RetryBuilder { - b.model.Multiplier = input - return b -} - -func (b *RetryBuilder) MaxAttempts(input intstr.IntOrString) *RetryBuilder { - b.model.MaxAttempts = input - return b -} - -func (b *RetryBuilder) Jitter(input floatstr.Float32OrString) *RetryBuilder { - b.model.Jitter = input - return b -} - -func (b *RetryBuilder) Build() Retry { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewScheduleBuilder() *ScheduleBuilder { - builder := &ScheduleBuilder{} - builder.model = Schedule{} - return builder -} - -type ScheduleBuilder struct { - model Schedule - cron *CronBuilder -} - -func (b *ScheduleBuilder) Interval(input string) *ScheduleBuilder { - b.model.Interval = input - return b -} - -func (b *ScheduleBuilder) Cron() *CronBuilder { - if b.cron == nil { - b.cron = NewCronBuilder() - } - return b.cron -} - -func (b *ScheduleBuilder) Timezone(input string) *ScheduleBuilder { - b.model.Timezone = input - return b -} - -func (b *ScheduleBuilder) Build() Schedule { - if b.cron != nil { - cron := b.cron.Build() - b.model.Cron = &cron - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSecretsBuilder() *SecretsBuilder { - builder := &SecretsBuilder{} - builder.model = Secrets{} - return builder -} - -type SecretsBuilder struct { - model Secrets -} - -func (b *SecretsBuilder) Build() Secrets { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepBuilder() *SleepBuilder { - builder := &SleepBuilder{} - builder.model = Sleep{} - return builder -} - -type SleepBuilder struct { - model Sleep -} - -func (b *SleepBuilder) Before(input string) *SleepBuilder { - b.model.Before = input - return b -} - -func (b *SleepBuilder) After(input string) *SleepBuilder { - b.model.After = input - return b -} - -func (b *SleepBuilder) Build() Sleep { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateBuilder() *SleepStateBuilder { - builder := &SleepStateBuilder{} - builder.model = SleepState{} - return builder -} - -type SleepStateBuilder struct { - model SleepState - timeouts *SleepStateTimeoutBuilder -} - -func (b *SleepStateBuilder) Duration(input string) *SleepStateBuilder { - b.model.Duration = input - return b -} - -func (b *SleepStateBuilder) Timeouts() *SleepStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSleepStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SleepStateBuilder) Build() SleepState { - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSleepStateTimeoutBuilder() *SleepStateTimeoutBuilder { - builder := &SleepStateTimeoutBuilder{} - builder.model = SleepStateTimeout{} - return builder -} - -type SleepStateTimeoutBuilder struct { - model SleepStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SleepStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SleepStateTimeoutBuilder) Build() SleepStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewStartBuilder() *StartBuilder { - builder := &StartBuilder{} - builder.model = Start{} - return builder -} - -type StartBuilder struct { - model Start - schedule *ScheduleBuilder -} - -func (b *StartBuilder) StateName(input string) *StartBuilder { - b.model.StateName = input - return b -} - -func (b *StartBuilder) Schedule() *ScheduleBuilder { - if b.schedule == nil { - b.schedule = NewScheduleBuilder() - } - return b.schedule -} - -func (b *StartBuilder) Build() Start { - if b.schedule != nil { - schedule := b.schedule.Build() - b.model.Schedule = &schedule - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateBuilder() *StateBuilder { - builder := &StateBuilder{} - builder.model = State{} - builder.BaseStateBuilder = *NewBaseStateBuilder() - return builder -} - -type StateBuilder struct { - model State - BaseStateBuilder - *DelayStateBuilder - *EventStateBuilder - *OperationStateBuilder - *ParallelStateBuilder - *SwitchStateBuilder - *ForEachStateBuilder - *InjectStateBuilder - *CallbackStateBuilder - *SleepStateBuilder -} - -func (b *StateBuilder) ID(input string) *StateBuilder { - b.BaseStateBuilder.ID(input) - return b -} - -func (b *StateBuilder) Name(input string) *StateBuilder { - b.BaseStateBuilder.Name(input) - return b -} - -func (b *StateBuilder) Type(input StateType) *StateBuilder { - b.BaseStateBuilder.Type(input) - return b -} - -func (b *StateBuilder) CompensatedBy(input string) *StateBuilder { - b.BaseStateBuilder.CompensatedBy(input) - return b -} - -func (b *StateBuilder) UsedForCompensation(input bool) *StateBuilder { - b.BaseStateBuilder.UsedForCompensation(input) - return b -} - -func (b *StateBuilder) DelayState() *DelayStateBuilder { - if b.DelayStateBuilder == nil { - b.DelayStateBuilder = NewDelayStateBuilder() - } - return b.DelayStateBuilder -} - -func (b *StateBuilder) TimeDelay(input string) *StateBuilder { - b.DelayStateBuilder.TimeDelay(input) - return b -} - -func (b *StateBuilder) EventState() *EventStateBuilder { - if b.EventStateBuilder == nil { - b.EventStateBuilder = NewEventStateBuilder() - } - return b.EventStateBuilder -} - -func (b *StateBuilder) Exclusive(input bool) *StateBuilder { - b.EventStateBuilder.Exclusive(input) - return b -} - -func (b *StateBuilder) OperationState() *OperationStateBuilder { - if b.OperationStateBuilder == nil { - b.OperationStateBuilder = NewOperationStateBuilder() - } - return b.OperationStateBuilder -} - -func (b *StateBuilder) ActionMode(input ActionMode) *StateBuilder { - b.OperationStateBuilder.ActionMode(input) - return b -} - -func (b *StateBuilder) ParallelState() *ParallelStateBuilder { - if b.ParallelStateBuilder == nil { - b.ParallelStateBuilder = NewParallelStateBuilder() - } - return b.ParallelStateBuilder -} - -func (b *StateBuilder) CompletionType(input CompletionType) *StateBuilder { - b.ParallelStateBuilder.CompletionType(input) - return b -} - -func (b *StateBuilder) SwitchState() *SwitchStateBuilder { - if b.SwitchStateBuilder == nil { - b.SwitchStateBuilder = NewSwitchStateBuilder() - } - return b.SwitchStateBuilder -} - -func (b *StateBuilder) ForEachState() *ForEachStateBuilder { - if b.ForEachStateBuilder == nil { - b.ForEachStateBuilder = NewForEachStateBuilder() - } - return b.ForEachStateBuilder -} - -func (b *StateBuilder) InputCollection(input string) *StateBuilder { - b.ForEachStateBuilder.InputCollection(input) - return b -} - -func (b *StateBuilder) OutputCollection(input string) 
*StateBuilder { - b.ForEachStateBuilder.OutputCollection(input) - return b -} - -func (b *StateBuilder) IterationParam(input string) *StateBuilder { - b.ForEachStateBuilder.IterationParam(input) - return b -} - -func (b *StateBuilder) Mode(input ForEachModeType) *StateBuilder { - b.ForEachStateBuilder.Mode(input) - return b -} - -func (b *StateBuilder) InjectState() *InjectStateBuilder { - if b.InjectStateBuilder == nil { - b.InjectStateBuilder = NewInjectStateBuilder() - } - return b.InjectStateBuilder -} - -func (b *StateBuilder) CallbackState() *CallbackStateBuilder { - if b.CallbackStateBuilder == nil { - b.CallbackStateBuilder = NewCallbackStateBuilder() - } - return b.CallbackStateBuilder -} - -func (b *StateBuilder) EventRef(input string) *StateBuilder { - b.CallbackStateBuilder.EventRef(input) - return b -} - -func (b *StateBuilder) SleepState() *SleepStateBuilder { - if b.SleepStateBuilder == nil { - b.SleepStateBuilder = NewSleepStateBuilder() - } - return b.SleepStateBuilder -} - -func (b *StateBuilder) Duration(input string) *StateBuilder { - b.SleepStateBuilder.Duration(input) - return b -} - -func (b *StateBuilder) Build() State { - b.model.BaseState = b.BaseStateBuilder.Build() - if b.DelayStateBuilder != nil { - delaystate := b.DelayStateBuilder.Build() - b.model.DelayState = &delaystate - } - if b.EventStateBuilder != nil { - eventstate := b.EventStateBuilder.Build() - b.model.EventState = &eventstate - } - if b.OperationStateBuilder != nil { - operationstate := b.OperationStateBuilder.Build() - b.model.OperationState = &operationstate - } - if b.ParallelStateBuilder != nil { - parallelstate := b.ParallelStateBuilder.Build() - b.model.ParallelState = &parallelstate - } - if b.SwitchStateBuilder != nil { - switchstate := b.SwitchStateBuilder.Build() - b.model.SwitchState = &switchstate - } - if b.ForEachStateBuilder != nil { - foreachstate := b.ForEachStateBuilder.Build() - b.model.ForEachState = &foreachstate - } - if b.InjectStateBuilder != nil { - injectstate := b.InjectStateBuilder.Build() - b.model.InjectState = &injectstate - } - if b.CallbackStateBuilder != nil { - callbackstate := b.CallbackStateBuilder.Build() - b.model.CallbackState = &callbackstate - } - if b.SleepStateBuilder != nil { - sleepstate := b.SleepStateBuilder.Build() - b.model.SleepState = &sleepstate - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStateDataFilterBuilder() *StateDataFilterBuilder { - builder := &StateDataFilterBuilder{} - builder.model = StateDataFilter{} - return builder -} - -type StateDataFilterBuilder struct { - model StateDataFilter -} - -func (b *StateDataFilterBuilder) Input(input string) *StateDataFilterBuilder { - b.model.Input = input - return b -} - -func (b *StateDataFilterBuilder) Output(input string) *StateDataFilterBuilder { - b.model.Output = input - return b -} - -func (b *StateDataFilterBuilder) Build() StateDataFilter { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewStateExecTimeoutBuilder() *StateExecTimeoutBuilder { - builder := &StateExecTimeoutBuilder{} - builder.model = StateExecTimeout{} - return builder -} - -type StateExecTimeoutBuilder struct { - model StateExecTimeout -} - -func (b *StateExecTimeoutBuilder) Single(input string) *StateExecTimeoutBuilder { - b.model.Single = input - return b -} - -func (b *StateExecTimeoutBuilder) Total(input string) *StateExecTimeoutBuilder { - b.model.Total = input - return b -} - -func (b *StateExecTimeoutBuilder) Build() StateExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewStatesBuilder() *StatesBuilder { - builder := &StatesBuilder{} - builder.model = States{} - return builder -} - -type StatesBuilder struct { - model States -} - -func (b *StatesBuilder) Build() States { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewSwitchStateBuilder() *SwitchStateBuilder { - builder := &SwitchStateBuilder{} - builder.model = SwitchState{} - builder.defaultcondition = NewDefaultConditionBuilder() - builder.eventconditions = []*EventConditionBuilder{} - builder.dataconditions = []*DataConditionBuilder{} - return builder -} - -type SwitchStateBuilder struct { - model SwitchState - defaultcondition *DefaultConditionBuilder - eventconditions []*EventConditionBuilder - dataconditions []*DataConditionBuilder - timeouts *SwitchStateTimeoutBuilder -} - -func (b *SwitchStateBuilder) DefaultCondition() *DefaultConditionBuilder { - return b.defaultcondition -} - -func (b *SwitchStateBuilder) AddEventConditions() *EventConditionBuilder { - builder := NewEventConditionBuilder() - b.eventconditions = append(b.eventconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveEventConditions(remove *EventConditionBuilder) { - for i, val := range b.eventconditions { - if val == remove { - b.eventconditions[i] = b.eventconditions[len(b.eventconditions)-1] - b.eventconditions = b.eventconditions[:len(b.eventconditions)-1] - } - } -} -func (b *SwitchStateBuilder) AddDataConditions() *DataConditionBuilder { - builder := NewDataConditionBuilder() - b.dataconditions = append(b.dataconditions, builder) - return builder -} - -func (b *SwitchStateBuilder) RemoveDataConditions(remove *DataConditionBuilder) { - for i, val := range b.dataconditions { - if val == remove { - b.dataconditions[i] = b.dataconditions[len(b.dataconditions)-1] - b.dataconditions = b.dataconditions[:len(b.dataconditions)-1] - } - } -} -func (b *SwitchStateBuilder) Timeouts() *SwitchStateTimeoutBuilder { - if b.timeouts == nil { - b.timeouts = NewSwitchStateTimeoutBuilder() - } - return b.timeouts -} - -func (b *SwitchStateBuilder) Build() SwitchState { - b.model.DefaultCondition = b.defaultcondition.Build() - b.model.EventConditions = []EventCondition{} - for _, v := range b.eventconditions { - b.model.EventConditions = append(b.model.EventConditions, v.Build()) - } - b.model.DataConditions = []DataCondition{} - for _, v := range b.dataconditions { - b.model.DataConditions = append(b.model.DataConditions, v.Build()) - } - if b.timeouts != nil { - timeouts := b.timeouts.Build() - b.model.Timeouts = &timeouts - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewSwitchStateTimeoutBuilder() *SwitchStateTimeoutBuilder { - builder := &SwitchStateTimeoutBuilder{} - builder.model = SwitchStateTimeout{} - return builder -} - -type SwitchStateTimeoutBuilder struct { - model SwitchStateTimeout - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *SwitchStateTimeoutBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *SwitchStateTimeoutBuilder) EventTimeout(input string) *SwitchStateTimeoutBuilder { - b.model.EventTimeout = input - return b -} - -func (b *SwitchStateTimeoutBuilder) Build() SwitchStateTimeout { - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewTimeoutsBuilder() *TimeoutsBuilder { - builder := &TimeoutsBuilder{} - builder.model = Timeouts{} - return builder -} - -type TimeoutsBuilder struct { - model Timeouts - workflowexectimeout *WorkflowExecTimeoutBuilder - stateexectimeout *StateExecTimeoutBuilder -} - -func (b *TimeoutsBuilder) WorkflowExecTimeout() *WorkflowExecTimeoutBuilder { - if b.workflowexectimeout == nil { - b.workflowexectimeout = NewWorkflowExecTimeoutBuilder() - } - return b.workflowexectimeout -} - -func (b *TimeoutsBuilder) StateExecTimeout() *StateExecTimeoutBuilder { - if b.stateexectimeout == nil { - b.stateexectimeout = NewStateExecTimeoutBuilder() - } - return b.stateexectimeout -} - -func (b *TimeoutsBuilder) ActionExecTimeout(input string) *TimeoutsBuilder { - b.model.ActionExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) BranchExecTimeout(input string) *TimeoutsBuilder { - b.model.BranchExecTimeout = input - return b -} - -func (b *TimeoutsBuilder) EventTimeout(input string) *TimeoutsBuilder { - b.model.EventTimeout = input - return b -} - -func (b *TimeoutsBuilder) Build() Timeouts { - if b.workflowexectimeout != nil { - workflowexectimeout := b.workflowexectimeout.Build() - b.model.WorkflowExecTimeout = &workflowexectimeout - } - if b.stateexectimeout != nil { - stateexectimeout := b.stateexectimeout.Build() - b.model.StateExecTimeout = &stateexectimeout - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewTransitionBuilder() *TransitionBuilder { - builder := &TransitionBuilder{} - builder.model = Transition{} - builder.produceevents = []*ProduceEventBuilder{} - return builder -} - -type TransitionBuilder struct { - model Transition - stateparent *StateBuilder - produceevents []*ProduceEventBuilder -} - -func (b *TransitionBuilder) stateParent() *StateBuilder { - if b.stateparent == nil { - b.stateparent = NewStateBuilder() - } - return b.stateparent -} - -func (b *TransitionBuilder) NextState(input string) *TransitionBuilder { - b.model.NextState = input - return b -} - -func (b *TransitionBuilder) AddProduceEvents() *ProduceEventBuilder { - builder := NewProduceEventBuilder() - b.produceevents = append(b.produceevents, builder) - return builder -} - -func (b *TransitionBuilder) RemoveProduceEvents(remove *ProduceEventBuilder) { - for i, val := range b.produceevents { - if val == remove { - b.produceevents[i] = b.produceevents[len(b.produceevents)-1] - b.produceevents = b.produceevents[:len(b.produceevents)-1] - } - } -} -func (b *TransitionBuilder) Compensate(input bool) *TransitionBuilder { - b.model.Compensate = input - return b -} - -func (b *TransitionBuilder) Build() Transition { - if b.stateparent != nil { - stateparent := b.stateparent.Build() - b.model.stateParent = &stateparent - } - b.model.ProduceEvents = []ProduceEvent{} - for _, v := range b.produceevents { - b.model.ProduceEvents = append(b.model.ProduceEvents, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowBuilder() *WorkflowBuilder { - builder := &WorkflowBuilder{} - builder.model = Workflow{} - builder.BaseWorkflowBuilder = *NewBaseWorkflowBuilder() - builder.states = []*StateBuilder{} - builder.events = []*EventBuilder{} - builder.functions = []*FunctionBuilder{} - builder.retries = []*RetryBuilder{} - return builder -} - -type WorkflowBuilder struct { - model Workflow - BaseWorkflowBuilder - states []*StateBuilder - events []*EventBuilder - functions []*FunctionBuilder - retries []*RetryBuilder -} - -func (b *WorkflowBuilder) ID(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.ID(input) - return b -} - -func (b *WorkflowBuilder) Key(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Key(input) - return b -} - -func (b *WorkflowBuilder) Name(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Name(input) - return b -} - -func (b *WorkflowBuilder) Description(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Description(input) - return b -} - -func (b *WorkflowBuilder) Version(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.Version(input) - return b -} - -func (b *WorkflowBuilder) SpecVersion(input string) *WorkflowBuilder { - b.BaseWorkflowBuilder.SpecVersion(input) - return b -} - -func (b *WorkflowBuilder) ExpressionLang(input ExpressionLangType) *WorkflowBuilder { - b.BaseWorkflowBuilder.ExpressionLang(input) - return b -} - -func (b *WorkflowBuilder) KeepActive(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.KeepActive(input) - return b -} - -func (b *WorkflowBuilder) AutoRetries(input bool) *WorkflowBuilder { - b.BaseWorkflowBuilder.AutoRetries(input) - return b -} - -func (b *WorkflowBuilder) AddStates() *StateBuilder { - builder := NewStateBuilder() - b.states = append(b.states, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveStates(remove *StateBuilder) { - for i, val := range b.states { - if val == remove { - b.states[i] = 
b.states[len(b.states)-1] - b.states = b.states[:len(b.states)-1] - } - } -} -func (b *WorkflowBuilder) AddEvents() *EventBuilder { - builder := NewEventBuilder() - b.events = append(b.events, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveEvents(remove *EventBuilder) { - for i, val := range b.events { - if val == remove { - b.events[i] = b.events[len(b.events)-1] - b.events = b.events[:len(b.events)-1] - } - } -} -func (b *WorkflowBuilder) AddFunctions() *FunctionBuilder { - builder := NewFunctionBuilder() - b.functions = append(b.functions, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveFunctions(remove *FunctionBuilder) { - for i, val := range b.functions { - if val == remove { - b.functions[i] = b.functions[len(b.functions)-1] - b.functions = b.functions[:len(b.functions)-1] - } - } -} -func (b *WorkflowBuilder) AddRetries() *RetryBuilder { - builder := NewRetryBuilder() - b.retries = append(b.retries, builder) - return builder -} - -func (b *WorkflowBuilder) RemoveRetries(remove *RetryBuilder) { - for i, val := range b.retries { - if val == remove { - b.retries[i] = b.retries[len(b.retries)-1] - b.retries = b.retries[:len(b.retries)-1] - } - } -} -func (b *WorkflowBuilder) Build() Workflow { - b.model.BaseWorkflow = b.BaseWorkflowBuilder.Build() - b.model.States = []State{} - for _, v := range b.states { - b.model.States = append(b.model.States, v.Build()) - } - b.model.Events = []Event{} - for _, v := range b.events { - b.model.Events = append(b.model.Events, v.Build()) - } - b.model.Functions = []Function{} - for _, v := range b.functions { - b.model.Functions = append(b.model.Functions, v.Build()) - } - b.model.Retries = []Retry{} - for _, v := range b.retries { - b.model.Retries = append(b.model.Retries, v.Build()) - } - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func NewWorkflowExecTimeoutBuilder() *WorkflowExecTimeoutBuilder { - builder := &WorkflowExecTimeoutBuilder{} - builder.model = WorkflowExecTimeout{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowExecTimeoutBuilder struct { - model WorkflowExecTimeout -} - -func (b *WorkflowExecTimeoutBuilder) Duration(input string) *WorkflowExecTimeoutBuilder { - b.model.Duration = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Interrupt(input bool) *WorkflowExecTimeoutBuilder { - b.model.Interrupt = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) RunBefore(input string) *WorkflowExecTimeoutBuilder { - b.model.RunBefore = input - return b -} - -func (b *WorkflowExecTimeoutBuilder) Build() WorkflowExecTimeout { - return b.model -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func NewWorkflowRefBuilder() *WorkflowRefBuilder { - builder := &WorkflowRefBuilder{} - builder.model = WorkflowRef{} - builder.model.ApplyDefault() - return builder -} - -type WorkflowRefBuilder struct { - model WorkflowRef -} - -func (b *WorkflowRefBuilder) WorkflowID(input string) *WorkflowRefBuilder { - b.model.WorkflowID = input - return b -} - -func (b *WorkflowRefBuilder) Version(input string) *WorkflowRefBuilder { - b.model.Version = input - return b -} - -func (b *WorkflowRefBuilder) Invoke(input InvokeKind) *WorkflowRefBuilder { - b.model.Invoke = input - return b -} - -func (b *WorkflowRefBuilder) OnParentComplete(input OnParentCompleteType) *WorkflowRefBuilder { - b.model.OnParentComplete = input - return b -} - -func (b *WorkflowRefBuilder) Build() WorkflowRef { - return b.model -} diff --git a/model/zz_generated.deepcopy.go b/model/zz_generated.deepcopy.go deleted file mode 100644 index 0fb2566..0000000 --- a/model/zz_generated.deepcopy.go +++ /dev/null @@ -1,1837 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Code generated by deepcopy-gen. DO NOT EDIT. - -package model - -import ( - json "encoding/json" - - floatstr "github.com/serverlessworkflow/sdk-go/v2/util/floatstr" - intstr "k8s.io/apimachinery/pkg/util/intstr" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Action) DeepCopyInto(out *Action) { - *out = *in - if in.FunctionRef != nil { - in, out := &in.FunctionRef, &out.FunctionRef - *out = new(FunctionRef) - (*in).DeepCopyInto(*out) - } - if in.EventRef != nil { - in, out := &in.EventRef, &out.EventRef - *out = new(EventRef) - (*in).DeepCopyInto(*out) - } - if in.SubFlowRef != nil { - in, out := &in.SubFlowRef, &out.SubFlowRef - *out = new(WorkflowRef) - **out = **in - } - if in.Sleep != nil { - in, out := &in.Sleep, &out.Sleep - *out = new(Sleep) - **out = **in - } - if in.NonRetryableErrors != nil { - in, out := &in.NonRetryableErrors, &out.NonRetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.RetryableErrors != nil { - in, out := &in.RetryableErrors, &out.RetryableErrors - *out = make([]string, len(*in)) - copy(*out, *in) - } - out.ActionDataFilter = in.ActionDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Action. -func (in *Action) DeepCopy() *Action { - if in == nil { - return nil - } - out := new(Action) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ActionDataFilter) DeepCopyInto(out *ActionDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ActionDataFilter. 
-func (in *ActionDataFilter) DeepCopy() *ActionDataFilter { - if in == nil { - return nil - } - out := new(ActionDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Auth) DeepCopyInto(out *Auth) { - *out = *in - in.Properties.DeepCopyInto(&out.Properties) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auth. -func (in *Auth) DeepCopy() *Auth { - if in == nil { - return nil - } - out := new(Auth) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AuthProperties) DeepCopyInto(out *AuthProperties) { - *out = *in - if in.Basic != nil { - in, out := &in.Basic, &out.Basic - *out = new(BasicAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.Bearer != nil { - in, out := &in.Bearer, &out.Bearer - *out = new(BearerAuthProperties) - (*in).DeepCopyInto(*out) - } - if in.OAuth2 != nil { - in, out := &in.OAuth2, &out.OAuth2 - *out = new(OAuth2AuthProperties) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthProperties. -func (in *AuthProperties) DeepCopy() *AuthProperties { - if in == nil { - return nil - } - out := new(AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Auths) DeepCopyInto(out *Auths) { - { - in := &in - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auths. -func (in Auths) DeepCopy() Auths { - if in == nil { - return nil - } - out := new(Auths) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BaseState) DeepCopyInto(out *BaseState) { - *out = *in - if in.OnErrors != nil { - in, out := &in.OnErrors, &out.OnErrors - *out = make([]OnError, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.StateDataFilter != nil { - in, out := &in.StateDataFilter, &out.StateDataFilter - *out = new(StateDataFilter) - **out = **in - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = new(Metadata) - if **in != nil { - in, out := *in, *out - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseState. -func (in *BaseState) DeepCopy() *BaseState { - if in == nil { - return nil - } - out := new(BaseState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BaseWorkflow) DeepCopyInto(out *BaseWorkflow) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(Start) - (*in).DeepCopyInto(*out) - } - if in.Annotations != nil { - in, out := &in.Annotations, &out.Annotations - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DataInputSchema != nil { - in, out := &in.DataInputSchema, &out.DataInputSchema - *out = new(DataInputSchema) - (*in).DeepCopyInto(*out) - } - if in.Secrets != nil { - in, out := &in.Secrets, &out.Secrets - *out = make(Secrets, len(*in)) - copy(*out, *in) - } - if in.Constants != nil { - in, out := &in.Constants, &out.Constants - *out = new(Constants) - (*in).DeepCopyInto(*out) - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(Timeouts) - (*in).DeepCopyInto(*out) - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(Errors, len(*in)) - copy(*out, *in) - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Auth != nil { - in, out := &in.Auth, &out.Auth - *out = make(Auths, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BaseWorkflow. -func (in *BaseWorkflow) DeepCopy() *BaseWorkflow { - if in == nil { - return nil - } - out := new(BaseWorkflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BasicAuthProperties) DeepCopyInto(out *BasicAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAuthProperties. -func (in *BasicAuthProperties) DeepCopy() *BasicAuthProperties { - if in == nil { - return nil - } - out := new(BasicAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BearerAuthProperties) DeepCopyInto(out *BearerAuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BearerAuthProperties. -func (in *BearerAuthProperties) DeepCopy() *BearerAuthProperties { - if in == nil { - return nil - } - out := new(BearerAuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Branch) DeepCopyInto(out *Branch) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(BranchTimeouts) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Branch. -func (in *Branch) DeepCopy() *Branch { - if in == nil { - return nil - } - out := new(Branch) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BranchTimeouts) DeepCopyInto(out *BranchTimeouts) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BranchTimeouts. -func (in *BranchTimeouts) DeepCopy() *BranchTimeouts { - if in == nil { - return nil - } - out := new(BranchTimeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackState) DeepCopyInto(out *CallbackState) { - *out = *in - in.Action.DeepCopyInto(&out.Action) - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(CallbackStateTimeout) - (*in).DeepCopyInto(*out) - } - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackState. -func (in *CallbackState) DeepCopy() *CallbackState { - if in == nil { - return nil - } - out := new(CallbackState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CallbackStateTimeout) DeepCopyInto(out *CallbackStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CallbackStateTimeout. -func (in *CallbackStateTimeout) DeepCopy() *CallbackStateTimeout { - if in == nil { - return nil - } - out := new(CallbackStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Common) DeepCopyInto(out *Common) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Common. -func (in *Common) DeepCopy() *Common { - if in == nil { - return nil - } - out := new(Common) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Constants) DeepCopyInto(out *Constants) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Constants. -func (in *Constants) DeepCopy() *Constants { - if in == nil { - return nil - } - out := new(Constants) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in ConstantsData) DeepCopyInto(out *ConstantsData) { - { - in := &in - *out = make(ConstantsData, len(*in)) - for key, val := range *in { - var outVal []byte - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = make(json.RawMessage, len(*in)) - copy(*out, *in) - } - (*out)[key] = outVal - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConstantsData. -func (in ConstantsData) DeepCopy() ConstantsData { - if in == nil { - return nil - } - out := new(ConstantsData) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ContinueAs) DeepCopyInto(out *ContinueAs) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - out.WorkflowExecTimeout = in.WorkflowExecTimeout - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ContinueAs. -func (in *ContinueAs) DeepCopy() *ContinueAs { - if in == nil { - return nil - } - out := new(ContinueAs) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Correlation) DeepCopyInto(out *Correlation) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation. -func (in *Correlation) DeepCopy() *Correlation { - if in == nil { - return nil - } - out := new(Correlation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Cron) DeepCopyInto(out *Cron) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cron. -func (in *Cron) DeepCopy() *Cron { - if in == nil { - return nil - } - out := new(Cron) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataCondition) DeepCopyInto(out *DataCondition) { - *out = *in - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCondition. -func (in *DataCondition) DeepCopy() *DataCondition { - if in == nil { - return nil - } - out := new(DataCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataInputSchema) DeepCopyInto(out *DataInputSchema) { - *out = *in - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(Object) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSchema. 
-func (in *DataInputSchema) DeepCopy() *DataInputSchema { - if in == nil { - return nil - } - out := new(DataInputSchema) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DefaultCondition) DeepCopyInto(out *DefaultCondition) { - *out = *in - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultCondition. -func (in *DefaultCondition) DeepCopy() *DefaultCondition { - if in == nil { - return nil - } - out := new(DefaultCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DelayState) DeepCopyInto(out *DelayState) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DelayState. -func (in *DelayState) DeepCopy() *DelayState { - if in == nil { - return nil - } - out := new(DelayState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *End) DeepCopyInto(out *End) { - *out = *in - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ContinueAs != nil { - in, out := &in.ContinueAs, &out.ContinueAs - *out = new(ContinueAs) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new End. -func (in *End) DeepCopy() *End { - if in == nil { - return nil - } - out := new(End) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Error) DeepCopyInto(out *Error) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Error. -func (in *Error) DeepCopy() *Error { - if in == nil { - return nil - } - out := new(Error) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Errors) DeepCopyInto(out *Errors) { - { - in := &in - *out = make(Errors, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Errors. -func (in Errors) DeepCopy() Errors { - if in == nil { - return nil - } - out := new(Errors) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Event) DeepCopyInto(out *Event) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Correlation != nil { - in, out := &in.Correlation, &out.Correlation - *out = make([]Correlation, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Event. 
-func (in *Event) DeepCopy() *Event { - if in == nil { - return nil - } - out := new(Event) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventCondition) DeepCopyInto(out *EventCondition) { - *out = *in - if in.EventDataFilter != nil { - in, out := &in.EventDataFilter, &out.EventDataFilter - *out = new(EventDataFilter) - **out = **in - } - if in.Metadata != nil { - in, out := &in.Metadata, &out.Metadata - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventCondition. -func (in *EventCondition) DeepCopy() *EventCondition { - if in == nil { - return nil - } - out := new(EventCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in EventConditions) DeepCopyInto(out *EventConditions) { - { - in := &in - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventConditions. -func (in EventConditions) DeepCopy() EventConditions { - if in == nil { - return nil - } - out := new(EventConditions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventDataFilter) DeepCopyInto(out *EventDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDataFilter. -func (in *EventDataFilter) DeepCopy() *EventDataFilter { - if in == nil { - return nil - } - out := new(EventDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventRef) DeepCopyInto(out *EventRef) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = new(Object) - (*in).DeepCopyInto(*out) - } - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventRef. -func (in *EventRef) DeepCopy() *EventRef { - if in == nil { - return nil - } - out := new(EventRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventState) DeepCopyInto(out *EventState) { - *out = *in - if in.OnEvents != nil { - in, out := &in.OnEvents, &out.OnEvents - *out = make([]OnEvents, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(EventStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventState. 
-func (in *EventState) DeepCopy() *EventState { - if in == nil { - return nil - } - out := new(EventState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventStateTimeout) DeepCopyInto(out *EventStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventStateTimeout. -func (in *EventStateTimeout) DeepCopy() *EventStateTimeout { - if in == nil { - return nil - } - out := new(EventStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Events) DeepCopyInto(out *Events) { - { - in := &in - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Events. -func (in Events) DeepCopy() Events { - if in == nil { - return nil - } - out := new(Events) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachState) DeepCopyInto(out *ForEachState) { - *out = *in - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(intstr.IntOrString) - **out = **in - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ForEachStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachState. -func (in *ForEachState) DeepCopy() *ForEachState { - if in == nil { - return nil - } - out := new(ForEachState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForEachStateTimeout) DeepCopyInto(out *ForEachStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForEachStateTimeout. -func (in *ForEachStateTimeout) DeepCopy() *ForEachStateTimeout { - if in == nil { - return nil - } - out := new(ForEachStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Function) DeepCopyInto(out *Function) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Function. -func (in *Function) DeepCopy() *Function { - if in == nil { - return nil - } - out := new(Function) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *FunctionRef) DeepCopyInto(out *FunctionRef) { - *out = *in - if in.Arguments != nil { - in, out := &in.Arguments, &out.Arguments - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FunctionRef. -func (in *FunctionRef) DeepCopy() *FunctionRef { - if in == nil { - return nil - } - out := new(FunctionRef) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Functions) DeepCopyInto(out *Functions) { - { - in := &in - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Functions. -func (in Functions) DeepCopy() Functions { - if in == nil { - return nil - } - out := new(Functions) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectState) DeepCopyInto(out *InjectState) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(InjectStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectState. -func (in *InjectState) DeepCopy() *InjectState { - if in == nil { - return nil - } - out := new(InjectState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InjectStateTimeout) DeepCopyInto(out *InjectStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InjectStateTimeout. -func (in *InjectStateTimeout) DeepCopy() *InjectStateTimeout { - if in == nil { - return nil - } - out := new(InjectStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Metadata) DeepCopyInto(out *Metadata) { - { - in := &in - *out = make(Metadata, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Metadata. -func (in Metadata) DeepCopy() Metadata { - if in == nil { - return nil - } - out := new(Metadata) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OAuth2AuthProperties) DeepCopyInto(out *OAuth2AuthProperties) { - *out = *in - in.Common.DeepCopyInto(&out.Common) - if in.Scopes != nil { - in, out := &in.Scopes, &out.Scopes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Audiences != nil { - in, out := &in.Audiences, &out.Audiences - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OAuth2AuthProperties. -func (in *OAuth2AuthProperties) DeepCopy() *OAuth2AuthProperties { - if in == nil { - return nil - } - out := new(OAuth2AuthProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Object) DeepCopyInto(out *Object) { - *out = *in - if in.MapValue != nil { - in, out := &in.MapValue, &out.MapValue - *out = make(map[string]Object, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.SliceValue != nil { - in, out := &in.SliceValue, &out.SliceValue - *out = make([]Object, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Object. -func (in *Object) DeepCopy() *Object { - if in == nil { - return nil - } - out := new(Object) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnError) DeepCopyInto(out *OnError) { - *out = *in - if in.ErrorRefs != nil { - in, out := &in.ErrorRefs, &out.ErrorRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Transition != nil { - in, out := &in.Transition, &out.Transition - *out = new(Transition) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(End) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnError. -func (in *OnError) DeepCopy() *OnError { - if in == nil { - return nil - } - out := new(OnError) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OnEvents) DeepCopyInto(out *OnEvents) { - *out = *in - if in.EventRefs != nil { - in, out := &in.EventRefs, &out.EventRefs - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.EventDataFilter = in.EventDataFilter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OnEvents. -func (in *OnEvents) DeepCopy() *OnEvents { - if in == nil { - return nil - } - out := new(OnEvents) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *OperationState) DeepCopyInto(out *OperationState) { - *out = *in - if in.Actions != nil { - in, out := &in.Actions, &out.Actions - *out = make([]Action, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(OperationStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationState. -func (in *OperationState) DeepCopy() *OperationState { - if in == nil { - return nil - } - out := new(OperationState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OperationStateTimeout) DeepCopyInto(out *OperationStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OperationStateTimeout. -func (in *OperationStateTimeout) DeepCopy() *OperationStateTimeout { - if in == nil { - return nil - } - out := new(OperationStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelState) DeepCopyInto(out *ParallelState) { - *out = *in - if in.Branches != nil { - in, out := &in.Branches, &out.Branches - *out = make([]Branch, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.NumCompleted = in.NumCompleted - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(ParallelStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelState. -func (in *ParallelState) DeepCopy() *ParallelState { - if in == nil { - return nil - } - out := new(ParallelState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParallelStateTimeout) DeepCopyInto(out *ParallelStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParallelStateTimeout. -func (in *ParallelStateTimeout) DeepCopy() *ParallelStateTimeout { - if in == nil { - return nil - } - out := new(ParallelStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ProduceEvent) DeepCopyInto(out *ProduceEvent) { - *out = *in - in.Data.DeepCopyInto(&out.Data) - if in.ContextAttributes != nil { - in, out := &in.ContextAttributes, &out.ContextAttributes - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProduceEvent. -func (in *ProduceEvent) DeepCopy() *ProduceEvent { - if in == nil { - return nil - } - out := new(ProduceEvent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func (in Retries) DeepCopyInto(out *Retries) { - { - in := &in - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retries. -func (in Retries) DeepCopy() Retries { - if in == nil { - return nil - } - out := new(Retries) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Retry) DeepCopyInto(out *Retry) { - *out = *in - if in.Multiplier != nil { - in, out := &in.Multiplier, &out.Multiplier - *out = new(floatstr.Float32OrString) - **out = **in - } - out.MaxAttempts = in.MaxAttempts - out.Jitter = in.Jitter - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Retry. -func (in *Retry) DeepCopy() *Retry { - if in == nil { - return nil - } - out := new(Retry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Schedule) DeepCopyInto(out *Schedule) { - *out = *in - if in.Cron != nil { - in, out := &in.Cron, &out.Cron - *out = new(Cron) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schedule. -func (in *Schedule) DeepCopy() *Schedule { - if in == nil { - return nil - } - out := new(Schedule) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in Secrets) DeepCopyInto(out *Secrets) { - { - in := &in - *out = make(Secrets, len(*in)) - copy(*out, *in) - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Secrets. -func (in Secrets) DeepCopy() Secrets { - if in == nil { - return nil - } - out := new(Secrets) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Sleep) DeepCopyInto(out *Sleep) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Sleep. -func (in *Sleep) DeepCopy() *Sleep { - if in == nil { - return nil - } - out := new(Sleep) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepState) DeepCopyInto(out *SleepState) { - *out = *in - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SleepStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepState. -func (in *SleepState) DeepCopy() *SleepState { - if in == nil { - return nil - } - out := new(SleepState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SleepStateTimeout) DeepCopyInto(out *SleepStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SleepStateTimeout. 
-func (in *SleepStateTimeout) DeepCopy() *SleepStateTimeout { - if in == nil { - return nil - } - out := new(SleepStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Start) DeepCopyInto(out *Start) { - *out = *in - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(Schedule) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Start. -func (in *Start) DeepCopy() *Start { - if in == nil { - return nil - } - out := new(Start) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *State) DeepCopyInto(out *State) { - *out = *in - in.BaseState.DeepCopyInto(&out.BaseState) - if in.DelayState != nil { - in, out := &in.DelayState, &out.DelayState - *out = new(DelayState) - **out = **in - } - if in.EventState != nil { - in, out := &in.EventState, &out.EventState - *out = new(EventState) - (*in).DeepCopyInto(*out) - } - if in.OperationState != nil { - in, out := &in.OperationState, &out.OperationState - *out = new(OperationState) - (*in).DeepCopyInto(*out) - } - if in.ParallelState != nil { - in, out := &in.ParallelState, &out.ParallelState - *out = new(ParallelState) - (*in).DeepCopyInto(*out) - } - if in.SwitchState != nil { - in, out := &in.SwitchState, &out.SwitchState - *out = new(SwitchState) - (*in).DeepCopyInto(*out) - } - if in.ForEachState != nil { - in, out := &in.ForEachState, &out.ForEachState - *out = new(ForEachState) - (*in).DeepCopyInto(*out) - } - if in.InjectState != nil { - in, out := &in.InjectState, &out.InjectState - *out = new(InjectState) - (*in).DeepCopyInto(*out) - } - if in.CallbackState != nil { - in, out := &in.CallbackState, &out.CallbackState - *out = new(CallbackState) - (*in).DeepCopyInto(*out) - } - if in.SleepState != nil { - in, out := &in.SleepState, &out.SleepState - *out = new(SleepState) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new State. -func (in *State) DeepCopy() *State { - if in == nil { - return nil - } - out := new(State) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateDataFilter) DeepCopyInto(out *StateDataFilter) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateDataFilter. -func (in *StateDataFilter) DeepCopy() *StateDataFilter { - if in == nil { - return nil - } - out := new(StateDataFilter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StateExecTimeout) DeepCopyInto(out *StateExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StateExecTimeout. -func (in *StateExecTimeout) DeepCopy() *StateExecTimeout { - if in == nil { - return nil - } - out := new(StateExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in States) DeepCopyInto(out *States) { - { - in := &in - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - return - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new States. -func (in States) DeepCopy() States { - if in == nil { - return nil - } - out := new(States) - in.DeepCopyInto(out) - return *out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchState) DeepCopyInto(out *SwitchState) { - *out = *in - in.DefaultCondition.DeepCopyInto(&out.DefaultCondition) - if in.EventConditions != nil { - in, out := &in.EventConditions, &out.EventConditions - *out = make(EventConditions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.DataConditions != nil { - in, out := &in.DataConditions, &out.DataConditions - *out = make([]DataCondition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Timeouts != nil { - in, out := &in.Timeouts, &out.Timeouts - *out = new(SwitchStateTimeout) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchState. -func (in *SwitchState) DeepCopy() *SwitchState { - if in == nil { - return nil - } - out := new(SwitchState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SwitchStateTimeout) DeepCopyInto(out *SwitchStateTimeout) { - *out = *in - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SwitchStateTimeout. -func (in *SwitchStateTimeout) DeepCopy() *SwitchStateTimeout { - if in == nil { - return nil - } - out := new(SwitchStateTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Timeouts) DeepCopyInto(out *Timeouts) { - *out = *in - if in.WorkflowExecTimeout != nil { - in, out := &in.WorkflowExecTimeout, &out.WorkflowExecTimeout - *out = new(WorkflowExecTimeout) - **out = **in - } - if in.StateExecTimeout != nil { - in, out := &in.StateExecTimeout, &out.StateExecTimeout - *out = new(StateExecTimeout) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Timeouts. -func (in *Timeouts) DeepCopy() *Timeouts { - if in == nil { - return nil - } - out := new(Timeouts) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Transition) DeepCopyInto(out *Transition) { - *out = *in - if in.stateParent != nil { - in, out := &in.stateParent, &out.stateParent - *out = new(State) - (*in).DeepCopyInto(*out) - } - if in.ProduceEvents != nil { - in, out := &in.ProduceEvents, &out.ProduceEvents - *out = make([]ProduceEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Transition. 
-func (in *Transition) DeepCopy() *Transition { - if in == nil { - return nil - } - out := new(Transition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ValidatorContext) DeepCopyInto(out *ValidatorContext) { - *out = *in - if in.States != nil { - in, out := &in.States, &out.States - *out = make(map[string]State, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(map[string]Function, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(map[string]Event, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(map[string]Retry, len(*in)) - for key, val := range *in { - (*out)[key] = *val.DeepCopy() - } - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make(map[string]Error, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidatorContext. -func (in *ValidatorContext) DeepCopy() *ValidatorContext { - if in == nil { - return nil - } - out := new(ValidatorContext) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Workflow) DeepCopyInto(out *Workflow) { - *out = *in - in.BaseWorkflow.DeepCopyInto(&out.BaseWorkflow) - if in.States != nil { - in, out := &in.States, &out.States - *out = make(States, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make(Events, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Functions != nil { - in, out := &in.Functions, &out.Functions - *out = make(Functions, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Retries != nil { - in, out := &in.Retries, &out.Retries - *out = make(Retries, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Workflow. -func (in *Workflow) DeepCopy() *Workflow { - if in == nil { - return nil - } - out := new(Workflow) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowExecTimeout) DeepCopyInto(out *WorkflowExecTimeout) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowExecTimeout. -func (in *WorkflowExecTimeout) DeepCopy() *WorkflowExecTimeout { - if in == nil { - return nil - } - out := new(WorkflowExecTimeout) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkflowRef) DeepCopyInto(out *WorkflowRef) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowRef. 
-func (in *WorkflowRef) DeepCopy() *WorkflowRef {
-	if in == nil {
-		return nil
-	}
-	out := new(WorkflowRef)
-	in.DeepCopyInto(out)
-	return out
-}
diff --git a/model/object.go b/object/object.go
similarity index 87%
rename from model/object.go
rename to object/object.go
index b8360a7..2225ea5 100644
--- a/model/object.go
+++ b/object/object.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Serverless Workflow Specification Authors
+// Copyright 2024 The Serverless Workflow Specification Authors
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package model
+package object
 
 import (
 	"bytes"
@@ -34,17 +34,6 @@ const (
 	Bool
 )
 
-// Object is used to allow integration with DeepCopy tool by replacing 'interface' generic type.
-// The DeepCopy tool allow us to easily import the Workflow types into a Kubernetes operator,
-// which requires the DeepCopy method.
-//
-// It can marshal and unmarshal any type.
-// This object type can be three types:
-// - String - holds string values
-// - Integer - holds int32 values, JSON marshal any number to float64 by default, during the marshaling process it is
-// parsed to int32
-//
-// +kubebuilder:validation:Type=object
 type Object struct {
 	Type        Type   `json:"type,inline"`
 	StringValue string `json:"strVal,inline"`
@@ -55,7 +44,6 @@ type Object struct {
 	BoolValue bool `json:"boolValue,inline"`
 }
 
-// UnmarshalJSON implements json.Unmarshaler
 func (obj *Object) UnmarshalJSON(data []byte) error {
 	data = bytes.TrimSpace(data)
 
diff --git a/model/object_test.go b/object/object_test.go
similarity index 97%
rename from model/object_test.go
rename to object/object_test.go
index 0cf928f..6ebc3b2 100644
--- a/model/object_test.go
+++ b/object/object_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Serverless Workflow Specification Authors
+// Copyright 2024 The Serverless Workflow Specification Authors
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package model
+package object
 
 import (
 	"encoding/json"
diff --git a/parser/parser_test.go b/parser/parser_test.go
deleted file mode 100644
index 8cc3de1..0000000
--- a/parser/parser_test.go
+++ /dev/null
@@ -1,1102 +0,0 @@
-// Copyright 2020 The Serverless Workflow Specification Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
- -package parser - -import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" - - "github.com/serverlessworkflow/sdk-go/v2/model" - "github.com/serverlessworkflow/sdk-go/v2/test" - "github.com/serverlessworkflow/sdk-go/v2/util" -) - -func TestBasicValidation(t *testing.T) { - rootPath := "./testdata/workflows" - files, err := os.ReadDir(rootPath) - assert.NoError(t, err) - - util.SetIncludePaths(append(util.IncludePaths(), filepath.Join(test.CurrentProjectPath(), "./parser/testdata"))) - - for _, file := range files { - if !file.IsDir() { - path := filepath.Join(rootPath, file.Name()) - workflow, err := FromFile(path) - - if assert.NoError(t, err, "Test File %s", path) { - assert.NotEmpty(t, workflow.ID, "Test File %s", file.Name()) - assert.NotEmpty(t, workflow.States, "Test File %s", file.Name()) - } - } - } -} - -func TestCustomValidators(t *testing.T) { - rootPath := "./testdata/workflows/witherrors" - files, err := os.ReadDir(rootPath) - assert.NoError(t, err) - for _, file := range files { - if !file.IsDir() { - _, err := FromFile(filepath.Join(rootPath, file.Name())) - assert.Error(t, err, "Test File %s", file.Name()) - } - } -} - -func TestFromFile(t *testing.T) { - files := []struct { - name string - f func(*testing.T, *model.Workflow) - }{ - { - "./testdata/workflows/greetings.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Equal(t, "greeting", w.ID) - assert.IsType(t, &model.OperationState{}, w.States[0].OperationState) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/actiondata-defaultvalue.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "greeting", w.ID) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.Equal(t, true, w.States[0].OperationState.Actions[0].ActionDataFilter.UseResults) - assert.Equal(t, "greeting", w.States[0].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/greetings.sw.yaml", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.IsType(t, "idx", w.States[0].ID) - assert.Equal(t, "greeting", w.ID) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.NotNil(t, w.States[0].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbaseddataandswitch.sw.json", - func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.Equal(t, "Start", w.States[0].Name) - assert.Equal(t, "CheckVisaStatus", w.States[1].Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].SwitchState) - assert.Equal(t, "PT1H", w.States[1].SwitchState.Timeouts.EventTimeout) - assert.Nil(t, w.States[1].End) - assert.NotNil(t, w.States[2].End) - assert.True(t, w.States[2].End.Terminate) - }, - }, { - "./testdata/workflows/conditionbasedstate.yaml", func(t *testing.T, w *model.Workflow) { - operationState := w.States[0].OperationState - 
assert.Equal(t, "${ .applicants | .age < 18 }", operationState.Actions[0].Condition) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - assert.NotNil(t, w.States[0].End) - assert.True(t, w.States[0].End.Terminate) - }, - }, { - "./testdata/workflows/eventbasedgreetingexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[1].EventRefs[0]) - assert.Equal(t, true, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreetingnonexclusive.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.Equal(t, "GreetingEvent2", w.Events[1].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - assert.Equal(t, "GreetingEvent2", eventState.OnEvents[0].EventRefs[1]) - assert.Equal(t, false, eventState.Exclusive) - }, - }, { - "./testdata/workflows/eventbasedgreeting.sw.p.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Greeting Workflow", w.Name) - assert.Equal(t, "GreetingEvent", w.Events[0].Name) - assert.NotNil(t, w.States[0]) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.OnEvents) - assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) - }, - }, { - "./testdata/workflows/eventbasedswitch.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Event Based Switch Transitions", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - assert.NotEmpty(t, w.States[0].EventConditions) - assert.Equal(t, "CheckVisaStatus", w.States[0].Name) - assert.IsType(t, model.EventCondition{}, w.States[0].EventConditions[0]) - }, - }, { - "./testdata/workflows/applicationrequest.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, 
operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 1) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - }, - }, { - "./testdata/workflows/applicationrequest.multiauth.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - assert.Equal(t, "CheckApplication", w.Start.StateName) - assert.NotNil(t, w.States[1]) - assert.NotNil(t, w.States[1].OperationState) - operationState := w.States[1].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) - assert.NotNil(t, w.Auth) - auth := w.Auth - assert.Equal(t, len(auth), 2) - assert.Equal(t, "testAuth", auth[0].Name) - assert.Equal(t, model.AuthTypeBearer, auth[0].Scheme) - bearerProperties := auth[0].Properties.Bearer.Token - assert.Equal(t, "test_token", bearerProperties) - assert.Equal(t, "testAuth2", auth[1].Name) - assert.Equal(t, model.AuthTypeBasic, auth[1].Scheme) - basicProperties := auth[1].Properties.Basic - assert.Equal(t, "test_user", basicProperties.Username) - assert.Equal(t, "test_pwd", basicProperties.Password) - // metadata - assert.Equal(t, model.Metadata{"metadata1": model.FromString("metadata1"), "metadata2": model.FromString("metadata2")}, w.Metadata) - assert.Equal(t, model.Metadata{"auth1": model.FromString("auth1"), "auth2": model.FromString("auth2")}, auth[0].Properties.Bearer.Metadata) - }, - }, { - "./testdata/workflows/applicationrequest.rp.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/applicationrequest.url.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - eventState := w.States[0].SwitchState - assert.NotNil(t, eventState) - assert.NotEmpty(t, eventState.DataConditions) - assert.IsType(t, model.DataCondition{}, eventState.DataConditions[0]) - assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) - }, - }, { - "./testdata/workflows/checkinbox.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - operationState := w.States[0].OperationState - assert.NotNil(t, operationState) - assert.NotEmpty(t, operationState.Actions) - assert.Len(t, w.States, 2) - }, - }, { - // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/workflows/provisionorders.sw.json", 
func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Provision Orders", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].OperationState) - assert.NotEmpty(t, w.States[0].OperationState.Actions) - assert.Len(t, w.States[0].OnErrors, 3) - assert.Equal(t, "Missing order id", w.States[0].OnErrors[0].ErrorRef) - assert.Equal(t, "MissingId", w.States[0].OnErrors[0].Transition.NextState) - assert.Equal(t, "Missing order item", w.States[0].OnErrors[1].ErrorRef) - assert.Equal(t, "MissingItem", w.States[0].OnErrors[1].Transition.NextState) - assert.Equal(t, "Missing order quantity", w.States[0].OnErrors[2].ErrorRef) - assert.Equal(t, "MissingQuantity", w.States[0].OnErrors[2].Transition.NextState) - }, - }, { - "./testdata/workflows/checkinbox.cron-test.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Check Inbox Workflow", w.Name) - assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) - assert.Equal(t, "checkInboxFunction", w.States[0].OperationState.Actions[0].FunctionRef.RefName) - assert.Equal(t, "SendTextForHighPriority", w.States[0].Transition.NextState) - assert.Nil(t, w.States[0].End) - assert.NotNil(t, w.States[1].End) - assert.True(t, w.States[1].End.Terminate) - }, - }, { - "./testdata/workflows/applicationrequest-issue16.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Applicant Request Decision Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].SwitchState) - switchState := w.States[0].SwitchState - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.Equal(t, "CheckApplication", w.States[0].Name) - }, - }, { - // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/workflows/patientonboarding.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Patient Onboarding Workflow", w.Name) - assert.NotNil(t, w.States[0]) - assert.NotNil(t, w.States[0].EventState) - eventState := w.States[0].EventState - assert.NotNil(t, eventState) - assert.NotEmpty(t, w.Retries) - assert.Len(t, w.Retries, 1) - assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) - assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) - }, - }, { - "./testdata/workflows/greetings-secret.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 1) - }, - }, { - "./testdata/workflows/greetings-secret-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.Len(t, w.Secrets, 3) - }, - }, { - "./testdata/workflows/greetings-constants-file.sw.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Greeting Workflow", w.Name) - assert.NotEmpty(t, w.Constants) - assert.NotEmpty(t, w.Constants.Data["Translations"]) - }, - }, { - "./testdata/workflows/roomreadings.timeouts.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/roomreadings.timeouts.file.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Room Temp and Humidity Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, 
- }, { - "./testdata/workflows/purchaseorderworkflow.sw.json", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Purchase Order Workflow", w.Name) - assert.NotNil(t, w.Timeouts) - assert.Equal(t, "P30D", w.Timeouts.WorkflowExecTimeout.Duration) - assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) - }, - }, { - "./testdata/workflows/continue-as-example.yaml", func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "Notify Customer", w.Name) - switchState := w.States[1].SwitchState - - assert.NotNil(t, switchState) - assert.NotEmpty(t, switchState.DataConditions) - assert.IsType(t, model.DataCondition{}, switchState.DataConditions[0]) - - endDataCondition := switchState.DataConditions[0] - assert.Equal(t, "notifycustomerworkflow", endDataCondition.End.ContinueAs.WorkflowID) - assert.Equal(t, "1.0", endDataCondition.End.ContinueAs.Version) - assert.Equal(t, model.FromString("${ del(.customerCount) }"), endDataCondition.End.ContinueAs.Data) - assert.Equal(t, "GenerateReport", endDataCondition.End.ContinueAs.WorkflowExecTimeout.RunBefore) - assert.Equal(t, true, endDataCondition.End.ContinueAs.WorkflowExecTimeout.Interrupt) - assert.Equal(t, "PT1H", endDataCondition.End.ContinueAs.WorkflowExecTimeout.Duration) - }, - }, { - name: "./testdata/workflows/greetings-v08-spec.sw.yaml", - f: func(t *testing.T, w *model.Workflow) { - assert.Equal(t, "custom.greeting", w.ID) - assert.Equal(t, "1.0", w.Version) - assert.Equal(t, "0.8", w.SpecVersion) - - // Workflow "name" no longer a required property - assert.Empty(t, w.Name) - - // Functions: - assert.NotEmpty(t, w.Functions[0]) - assert.Equal(t, "greetingCustomFunction", w.Functions[0].Name) - assert.Equal(t, model.FunctionTypeCustom, w.Functions[0].Type) - assert.Equal(t, "/path/to/my/script/greeting.ts#CustomGreeting", w.Functions[0].Operation) - - assert.NotEmpty(t, w.Functions[1]) - assert.Equal(t, "sendTextFunction", w.Functions[1].Name) - assert.Equal(t, model.FunctionTypeGraphQL, w.Functions[1].Type) - assert.Equal(t, "http://myapis.org/inboxapi.json#sendText", w.Functions[1].Operation) - - assert.NotEmpty(t, w.Functions[2]) - assert.Equal(t, "greetingFunction", w.Functions[2].Name) - assert.Equal(t, model.FunctionTypeREST, w.Functions[2].Type) - assert.Equal(t, "file://myapis/greetingapis.json#greeting", w.Functions[2].Operation) - - // Delay state - assert.NotEmpty(t, w.States[0].DelayState.TimeDelay) - assert.Equal(t, "GreetDelay", w.States[0].Name) - assert.Equal(t, model.StateTypeDelay, w.States[0].Type) - assert.Equal(t, "StoreCarAuctionBid", w.States[0].Transition.NextState) - - // Event state - assert.NotEmpty(t, w.States[1].EventState.OnEvents) - assert.Equal(t, "StoreCarAuctionBid", w.States[1].Name) - assert.Equal(t, model.StateTypeEvent, w.States[1].Type) - assert.Equal(t, true, w.States[1].EventState.Exclusive) - assert.NotEmpty(t, true, w.States[1].EventState.OnEvents[0]) - assert.Equal(t, []string{"CarBidEvent"}, w.States[1].EventState.OnEvents[0].EventRefs) - assert.Equal(t, true, w.States[1].EventState.OnEvents[0].EventDataFilter.UseData) - assert.Equal(t, "test", w.States[1].EventState.OnEvents[0].EventDataFilter.Data) - assert.Equal(t, "testing", w.States[1].EventState.OnEvents[0].EventDataFilter.ToStateData) - assert.Equal(t, model.ActionModeParallel, w.States[1].EventState.OnEvents[0].ActionMode) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.RefName) - 
assert.Equal(t, "funcref1", w.States[1].EventState.OnEvents[0].Actions[0].Name) - assert.Equal(t, map[string]model.Object{"bid": model.FromString("${ .bid }")}, w.States[1].EventState.OnEvents[0].Actions[0].FunctionRef.Arguments) - - assert.NotEmpty(t, w.States[1].EventState.OnEvents[0].Actions[1].EventRef) - assert.Equal(t, "eventRefName", w.States[1].EventState.OnEvents[0].Actions[1].Name) - assert.Equal(t, "StoreBidFunction", w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ResultEventRef) - - data := model.FromString("${ .patientInfo }") - assert.Equal(t, &data, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.Data) - assert.Equal(t, map[string]model.Object{"customer": model.FromString("${ .customer }"), "time": model.FromInt(48)}, w.States[1].EventState.OnEvents[0].Actions[1].EventRef.ContextAttributes) - - assert.Equal(t, "PT1S", w.States[1].EventState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[1].EventState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, "PT1H", w.States[1].EventState.Timeouts.EventTimeout) - assert.Equal(t, "PT3S", w.States[1].EventState.Timeouts.ActionExecTimeout) - - // Parallel state - assert.NotEmpty(t, w.States[2].ParallelState.Branches) - assert.Equal(t, "ShortDelayBranch", w.States[2].ParallelState.Branches[0].Name) - assert.Equal(t, "shortdelayworkflowid", w.States[2].ParallelState.Branches[0].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "PT5H", w.States[2].ParallelState.Branches[0].Timeouts.ActionExecTimeout) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Branches[0].Timeouts.BranchExecTimeout) - assert.Equal(t, "LongDelayBranch", w.States[2].ParallelState.Branches[1].Name) - assert.Equal(t, "longdelayworkflowid", w.States[2].ParallelState.Branches[1].Actions[0].SubFlowRef.WorkflowID) - assert.Equal(t, "ParallelExec", w.States[2].Name) - assert.Equal(t, model.StateTypeParallel, w.States[2].Type) - assert.Equal(t, model.CompletionTypeAtLeast, w.States[2].ParallelState.CompletionType) - assert.Equal(t, "PT6M", w.States[2].ParallelState.Timeouts.BranchExecTimeout) - assert.Equal(t, "PT1S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[2].ParallelState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, intstr.IntOrString{IntVal: 13}, w.States[2].ParallelState.NumCompleted) - - // Switch state - assert.NotEmpty(t, w.States[3].SwitchState.EventConditions) - assert.Equal(t, "CheckVisaStatusSwitchEventBased", w.States[3].Name) - assert.Equal(t, model.StateTypeSwitch, w.States[3].Type) - assert.Equal(t, "visaApprovedEvent", w.States[3].EventConditions[0].Name) - assert.Equal(t, "visaApprovedEventRef", w.States[3].EventConditions[0].EventRef) - assert.Equal(t, "HandleApprovedVisa", w.States[3].EventConditions[0].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "mastercard": model.FromString("disallowed"), - "visa": model.FromString("allowed"), - }, - w.States[3].EventConditions[0].Metadata, - ) - assert.Equal(t, "visaRejectedEvent", w.States[3].EventConditions[1].EventRef) - assert.Equal(t, "HandleRejectedVisa", w.States[3].EventConditions[1].Transition.NextState) - assert.Equal(t, - model.Metadata{ - "test": model.FromString("tested"), - }, - w.States[3].EventConditions[1].Metadata, - ) - assert.Equal(t, "PT1H", w.States[3].SwitchState.Timeouts.EventTimeout) - assert.Equal(t, "PT1S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[3].SwitchState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, 
"HandleNoVisaDecision", w.States[3].SwitchState.DefaultCondition.Transition.NextState) - - // DataBasedSwitchState - dataBased := w.States[4].SwitchState - assert.NotEmpty(t, dataBased.DataConditions) - assert.Equal(t, "CheckApplicationSwitchDataBased", w.States[4].Name) - dataCondition := dataBased.DataConditions[0] - assert.Equal(t, "${ .applicants | .age >= 18 }", dataCondition.Condition) - assert.Equal(t, "StartApplication", dataCondition.Transition.NextState) - assert.Equal(t, "RejectApplication", w.States[4].DefaultCondition.Transition.NextState) - assert.Equal(t, "PT1S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[4].SwitchState.Timeouts.StateExecTimeout.Single) - - // operation state - assert.NotEmpty(t, w.States[5].OperationState.Actions) - assert.Equal(t, "GreetSequential", w.States[5].Name) - assert.Equal(t, model.StateTypeOperation, w.States[5].Type) - assert.Equal(t, model.ActionModeSequential, w.States[5].OperationState.ActionMode) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].Name) - assert.NotNil(t, w.States[5].OperationState.Actions[0].FunctionRef) - assert.Equal(t, "greetingCustomFunction", w.States[5].OperationState.Actions[0].FunctionRef.RefName) - - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example", w.States[5].OperationState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT1H", w.States[5].OperationState.Actions[0].EventRef.ResultEventTimeout) - assert.Equal(t, "PT1H", w.States[5].OperationState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT1S", w.States[5].OperationState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT2S", w.States[5].OperationState.Timeouts.StateExecTimeout.Single) - - // forEach state - assert.NotEmpty(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "SendTextForHighPriority", w.States[6].Name) - assert.Equal(t, model.ForEachModeTypeSequential, w.States[6].ForEachState.Mode) - assert.Equal(t, model.StateTypeForEach, w.States[6].Type) - assert.Equal(t, "${ .messages }", w.States[6].ForEachState.InputCollection) - assert.Equal(t, "${ .outputMessages }", w.States[6].ForEachState.OutputCollection) - assert.Equal(t, "${ .this }", w.States[6].ForEachState.IterationParam) - - batchSize := intstr.FromInt(45) - assert.Equal(t, &batchSize, w.States[6].ForEachState.BatchSize) - - assert.NotNil(t, w.States[6].ForEachState.Actions) - assert.Equal(t, "test", w.States[6].ForEachState.Actions[0].Name) - assert.NotNil(t, w.States[6].ForEachState.Actions[0].FunctionRef) - assert.Equal(t, "sendTextFunction", w.States[6].ForEachState.Actions[0].FunctionRef.RefName) - assert.Equal(t, map[string]model.Object{"message": model.FromString("${ .singlemessage }")}, w.States[6].ForEachState.Actions[0].FunctionRef.Arguments) - - // assert.Equal(t, "example1", w.States[6].ForEachState.Actions[0].EventRef.TriggerEventRef) - // assert.Equal(t, "example2", w.States[6].ForEachState.Actions[0].EventRef.ResultEventRef) - // assert.Equal(t, "PT12H", w.States[6].ForEachState.Actions[0].EventRef.ResultEventTimeout) - - assert.Equal(t, "PT11H", w.States[6].ForEachState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT11S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22S", w.States[6].ForEachState.Timeouts.StateExecTimeout.Single) - - // Inject state - assert.Equal(t, "HelloInject", 
w.States[7].Name) - assert.Equal(t, model.StateTypeInject, w.States[7].Type) - assert.Equal(t, model.FromString("Hello World, last state!"), w.States[7].InjectState.Data["result"]) - assert.Equal(t, model.FromBool(false), w.States[7].InjectState.Data["boolValue"]) - assert.Equal(t, "PT11M", w.States[7].InjectState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[7].InjectState.Timeouts.StateExecTimeout.Single) - - // callback state - assert.NotEmpty(t, w.States[8].CallbackState.Action) - assert.Equal(t, "CheckCreditCallback", w.States[8].Name) - assert.Equal(t, model.StateTypeCallback, w.States[8].Type) - assert.Equal(t, "callCreditCheckMicroservice", w.States[8].CallbackState.Action.FunctionRef.RefName) - assert.Equal(t, - map[string]model.Object{ - "argsObj": model.FromMap(map[string]interface{}{"age": 10, "name": "hi"}), - "customer": model.FromString("${ .customer }"), - "time": model.FromInt(48), - }, - w.States[8].CallbackState.Action.FunctionRef.Arguments, - ) - assert.Equal(t, "PT10S", w.States[8].CallbackState.Action.Sleep.Before) - assert.Equal(t, "PT20S", w.States[8].CallbackState.Action.Sleep.After) - assert.Equal(t, "PT150M", w.States[8].CallbackState.Timeouts.ActionExecTimeout) - assert.Equal(t, "PT34S", w.States[8].CallbackState.Timeouts.EventTimeout) - assert.Equal(t, "PT115M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT22M", w.States[8].CallbackState.Timeouts.StateExecTimeout.Single) - - assert.Equal(t, true, w.States[8].CallbackState.EventDataFilter.UseData) - assert.Equal(t, "test data", w.States[8].CallbackState.EventDataFilter.Data) - assert.Equal(t, "${ .customer }", w.States[8].CallbackState.EventDataFilter.ToStateData) - - // sleepState - assert.NotEmpty(t, w.States[9].SleepState.Duration) - assert.Equal(t, "WaitForCompletionSleep", w.States[9].Name) - assert.Equal(t, model.StateTypeSleep, w.States[9].Type) - assert.Equal(t, "PT5S", w.States[9].SleepState.Duration) - assert.NotNil(t, w.States[9].SleepState.Timeouts) - assert.Equal(t, "PT100S", w.States[9].SleepState.Timeouts.StateExecTimeout.Total) - assert.Equal(t, "PT200S", w.States[9].SleepState.Timeouts.StateExecTimeout.Single) - assert.Equal(t, true, w.States[9].End.Terminate) - - // switch state with DefaultCondition as string - assert.NotEmpty(t, w.States[10].SwitchState) - assert.Equal(t, "HelloStateWithDefaultConditionString", w.States[10].Name) - assert.Equal(t, "${ true }", w.States[10].SwitchState.DataConditions[0].Condition) - assert.Equal(t, "HandleApprovedVisa", w.States[10].SwitchState.DataConditions[0].Transition.NextState) - assert.Equal(t, "SendTextForHighPriority", w.States[10].SwitchState.DefaultCondition.Transition.NextState) - assert.Equal(t, true, w.States[10].End.Terminate) - }, - }, { - "./testdata/workflows/dataInputSchemaValidation.yaml", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.DataInputSchema) - expected := model.DataInputSchema{} - data, err := util.LoadExternalResource("file://testdata/datainputschema.json") - err1 := util.UnmarshalObject("schema", data, &expected.Schema) - assert.Nil(t, err) - assert.Nil(t, err1) - assert.Equal(t, expected.Schema, w.DataInputSchema.Schema) - assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) - }, - }, { - "./testdata/workflows/dataInputSchemaObject.json", func(t *testing.T, w *model.Workflow) { - assert.NotNil(t, w.DataInputSchema) - expected := model.Object{} - err := json.Unmarshal([]byte("{\"title\": \"Hello World Schema\", \"properties\": {\"person\": "+ - 
"{\"type\": \"object\",\"properties\": {\"name\": {\"type\": \"string\"}},\"required\": "+ - "[\"name\"]}}, \"required\": [\"person\"]}"), - &expected) - fmt.Printf("err: %s\n", err) - fmt.Printf("schema: %+v\n", expected) - assert.Equal(t, &expected, w.DataInputSchema.Schema) - assert.Equal(t, false, w.DataInputSchema.FailOnValidationErrors) - }, - }, - } - for _, file := range files { - t.Run( - file.name, func(t *testing.T) { - workflow, err := FromFile(file.name) - if assert.NoError(t, err, "Test File %s", file.name) { - assert.NotNil(t, workflow, "Test File %s", file.name) - file.f(t, workflow) - } - }, - ) - } -} - -func TestUnmarshalWorkflowBasicTests(t *testing.T) { - t.Run("BasicWorkflowYamlNoAuthDefs", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: TestUnmarshalWorkflowBasicTests -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: inject - data: - result: Hello World! - end: true -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow) - - b, err := json.Marshal(workflow) - assert.Nil(t, err) - assert.True(t, !strings.Contains(string(b), "auth")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) - - t.Run("BasicWorkflowBasicAuthJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ], - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "transition": "Next Hello State" - }, - { - "name": "Next Hello State", - "type": "inject", - "data": { - "result": "Next Hello World!" - }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"transition\":{\"nextState\":\"Next Hello State\"},\"data\":{\"result\":\"Hello World!\"}},{\"name\":\"Next Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Next Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowBasicAuthStringJSONSource", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.8", - "auth": "testdata/workflows/urifiles/auth.json", - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" 
- }, - "end": true - } - ] -} -`)) - assert.Nil(t, err) - assert.NotNil(t, workflow.Auth) - - b, _ := json.Marshal(workflow) - assert.Equal(t, "{\"id\":\"applicantrequest\",\"name\":\"Applicant Request Decision Workflow\",\"description\":\"Determine if applicant request is valid\",\"version\":\"1.0\",\"start\":{\"stateName\":\"Hello State\"},\"specVersion\":\"0.8\",\"expressionLang\":\"jq\",\"auth\":[{\"name\":\"testAuth\",\"scheme\":\"bearer\",\"properties\":{\"token\":\"test_token\"}},{\"name\":\"testAuth2\",\"scheme\":\"basic\",\"properties\":{\"username\":\"test_user\",\"password\":\"test_pwd\"}}],\"states\":[{\"name\":\"Hello State\",\"type\":\"inject\",\"end\":{\"terminate\":true},\"data\":{\"result\":\"Hello World!\"}}]}", - string(b)) - - }) - - t.Run("BasicWorkflowInteger", func(t *testing.T) { - workflow, err := FromJSONSource([]byte(` -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "Hello State", - "specVersion": "0.7", - "auth": 123, - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} -`)) - - assert.NotNil(t, err) - assert.Equal(t, "auth must be string or array", err.Error()) - assert.Nil(t, workflow) - }) -} - -func TestUnmarshalWorkflowSwitchState(t *testing.T) { - t.Run("WorkflowStatesTest", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowStatesTest -description: Inject Hello World -start: GreetDelay -metadata: - metadata1: metadata1 - metadata2: metadata2 -auth: -- name: testAuth - scheme: bearer - properties: - token: test_token - metadata: - auth1: auth1 - auth2: auth2 -events: -- name: StoreBidFunction - type: store -- name: CarBidEvent - type: store -- name: visaRejectedEvent - type: store -- name: visaApprovedEventRef - type: store -functions: -- name: callCreditCheckMicroservice - operation: http://myapis.org/creditcheck.json#checkCredit -- name: StoreBidFunction - operation: http://myapis.org/storebid.json#storeBid -- name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: -- name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid -- name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: bidFunctionRef - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .thatBid }" - time: 32 - name: bidEventRef - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec -- name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid - timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased -- name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: 
visaApprovedEventRef - transition: - nextState: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: - nextState: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT10H - stateExecTimeout: - total: PT10S - single: PT20S - defaultCondition: - transition: - nextState: HelloStateWithDefaultConditionString -- name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority -- name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages }" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject -- name: HelloInject - type: inject - data: - result: Hello World, another state! - timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: WaitForCompletionSleep -- name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true -- name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - time: 48 - argsObj: { - "name" : "hi", - "age": { - "initial": 10, - "final": 32 - } - } - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT199M - eventTimeout: PT348S - stateExecTimeout: - total: PT115M - single: PT22M - transition: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 50 - name: eventRefName - timeouts: - actionExecTimeout: PT777S - stateExecTimeout: - total: PT33M - single: PT123M - transition: HandleApprovedVisaSubFlow -- name: HandleApprovedVisaSubFlow - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - name: subFlowRefName - end: - terminate: true -`)) - assert.NoError(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.NoError(t, err) - - // workflow and auth metadata - assert.True(t, strings.Contains(string(b), "\"metadata\":{\"metadata1\":\"metadata1\",\"metadata2\":\"metadata2\"}")) - assert.True(t, strings.Contains(string(b), ":{\"metadata\":{\"auth1\":\"auth1\",\"auth2\":\"auth2\"}")) - - // Callback state - assert.True(t, strings.Contains(string(b), "{\"name\":\"CheckCreditCallback\",\"type\":\"callback\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"},\"action\":{\"functionRef\":{\"refName\":\"callCreditCheckMicroservice\",\"arguments\":{\"argsObj\":{\"age\":{\"final\":32,\"initial\":10},\"name\":\"hi\"},\"customer\":\"${ .customer 
}\",\"time\":48},\"invoke\":\"sync\"},\"sleep\":{\"before\":\"PT10S\",\"after\":\"PT20S\"},\"actionDataFilter\":{\"useResults\":true}},\"eventRef\":\"CreditCheckCompletedEvent\",\"eventDataFilter\":{\"useData\":true,\"data\":\"test data\",\"toStateData\":\"${ .customer }\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT115M\"},\"actionExecTimeout\":\"PT199M\",\"eventTimeout\":\"PT348S\"}}")) - - // Operation State - assert.True(t, strings.Contains(string(b), `{"name":"HandleApprovedVisa","type":"operation","transition":{"nextState":"HandleApprovedVisaSubFlow"},"actionMode":"sequential","actions":[{"name":"eventRefName","eventRef":{"triggerEventRef":"StoreBidFunction","resultEventRef":"StoreBidFunction","data":"${ .patientInfo }","contextAttributes":{"customer":"${ .customer }","time":50},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"timeouts":{"stateExecTimeout":{"single":"PT123M","total":"PT33M"},"actionExecTimeout":"PT777S"}}`)) - - // Delay State - assert.True(t, strings.Contains(string(b), "{\"name\":\"GreetDelay\",\"type\":\"delay\",\"transition\":{\"nextState\":\"StoreCarAuctionBid\"},\"timeDelay\":\"PT5S\"}")) - - // Event State - assert.True(t, strings.Contains(string(b), "{\"name\":\"StoreCarAuctionBid\",\"type\":\"event\",\"transition\":{\"nextState\":\"ParallelExec\"},\"exclusive\":true,\"onEvents\":[{\"eventRefs\":[\"CarBidEvent\"],\"actionMode\":\"parallel\",\"actions\":[{\"name\":\"bidFunctionRef\",\"functionRef\":{\"refName\":\"StoreBidFunction\",\"arguments\":{\"bid\":\"${ .bid }\"},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}},{\"name\":\"bidEventRef\",\"eventRef\":{\"triggerEventRef\":\"StoreBidFunction\",\"resultEventRef\":\"StoreBidFunction\",\"data\":\"${ .patientInfo }\",\"contextAttributes\":{\"customer\":\"${ .thatBid }\",\"time\":32},\"invoke\":\"sync\"},\"actionDataFilter\":{\"useResults\":true}}],\"eventDataFilter\":{\"useData\":true,\"data\":\"test\",\"toStateData\":\"testing\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"actionExecTimeout\":\"PT3S\",\"eventTimeout\":\"PT1H\"}}")) - - // Parallel State - assert.True(t, strings.Contains(string(b), "{\"name\":\"ParallelExec\",\"type\":\"parallel\",\"transition\":{\"nextState\":\"CheckVisaStatusSwitchEventBased\"},\"branches\":[{\"name\":\"ShortDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"shortdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}],\"timeouts\":{\"actionExecTimeout\":\"PT5H\",\"branchExecTimeout\":\"PT6M\"}},{\"name\":\"LongDelayBranch\",\"actions\":[{\"subFlowRef\":{\"workflowId\":\"longdelayworkflowid\",\"invoke\":\"sync\",\"onParentComplete\":\"terminate\"},\"actionDataFilter\":{\"useResults\":true}}]}],\"completionType\":\"atLeast\",\"numCompleted\":13,\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT2S\",\"total\":\"PT1S\"},\"branchExecTimeout\":\"PT6M\"}}")) - - // Switch State - assert.True(t, strings.Contains(string(b), 
"{\"name\":\"CheckVisaStatusSwitchEventBased\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"HelloStateWithDefaultConditionString\"}},\"eventConditions\":[{\"name\":\"visaApprovedEvent\",\"eventRef\":\"visaApprovedEventRef\",\"metadata\":{\"mastercard\":\"disallowed\",\"visa\":\"allowed\"},\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"eventRef\":\"visaRejectedEvent\",\"metadata\":{\"test\":\"tested\"},\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}],\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT20S\",\"total\":\"PT10S\"},\"eventTimeout\":\"PT10H\"}}")) - - // Switch State with string DefaultCondition - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloStateWithDefaultConditionString\",\"type\":\"switch\",\"defaultCondition\":{\"transition\":{\"nextState\":\"SendTextForHighPriority\"}},\"dataConditions\":[{\"condition\":\"${ true }\",\"transition\":{\"nextState\":\"HandleApprovedVisa\"}},{\"condition\":\"${ false }\",\"transition\":{\"nextState\":\"HandleRejectedVisa\"}}]}")) - - // Foreach State - assert.True(t, strings.Contains(string(b), `{"name":"SendTextForHighPriority","type":"foreach","transition":{"nextState":"HelloInject"},"inputCollection":"${ .messages }","outputCollection":"${ .outputMessages }","iterationParam":"${ .this }","batchSize":45,"actions":[{"name":"test","functionRef":{"refName":"sendTextFunction","arguments":{"message":"${ .singlemessage }"},"invoke":"sync"},"actionDataFilter":{"useResults":true}}],"mode":"sequential","timeouts":{"stateExecTimeout":{"single":"PT22S","total":"PT11S"},"actionExecTimeout":"PT11H"}}`)) - - // Inject State - assert.True(t, strings.Contains(string(b), "{\"name\":\"HelloInject\",\"type\":\"inject\",\"transition\":{\"nextState\":\"WaitForCompletionSleep\"},\"data\":{\"result\":\"Hello World, another state!\"},\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT22M\",\"total\":\"PT11M\"}}}")) - - // Sleep State - assert.True(t, strings.Contains(string(b), "{\"name\":\"WaitForCompletionSleep\",\"type\":\"sleep\",\"end\":{\"terminate\":true},\"duration\":\"PT5S\",\"timeouts\":{\"stateExecTimeout\":{\"single\":\"PT200S\",\"total\":\"PT100S\"}}}")) - - workflow = nil - err = json.Unmarshal(b, &workflow) - // Make sure that the Action FunctionRef is unmarshalled correctly - assert.Equal(t, model.FromString("${ .singlemessage }"), workflow.States[5].ForEachState.Actions[0].FunctionRef.Arguments["message"]) - assert.Equal(t, "sendTextFunction", workflow.States[5].ForEachState.Actions[0].FunctionRef.RefName) - assert.NoError(t, err) - - }) - - t.Run("WorkflowSwitchStateDataConditions with wrong field name", func(t *testing.T) { - workflow, err := FromYAMLSource([]byte(` -id: helloworld -version: '1.0.0' -specVersion: '0.8' -name: WorkflowSwitchStateDataConditions with wrong field name -description: Inject Hello World -start: Hello State -states: -- name: Hello State - type: switch - dataCondition: - - condition: ${ true } - transition: - nextState: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: - transition: - nextState: HandleApprovedVisa -- name: HandleApprovedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleApprovedVisaWorkflowID - end: - terminate: true -- name: HandleRejectedVisa - type: operation - actions: - - subFlowRef: - workflowId: handleRejectedVisaWorkflowID - end: - terminate: true -- name: HandleNoVisaDecision - type: operation - actions: - - subFlowRef: - workflowId: 
handleNoVisaDecisionWorkflowId - end: - terminate: true -`)) - if assert.Error(t, err) { - assert.Equal(t, `workflow.states[0].switchState.dataConditions is required`, err.Error()) - } - assert.Nil(t, workflow) - }) - - t.Run("Test complex workflow with compensate transitions", func(t *testing.T) { - workflow, err := FromFile("./testdata/workflows/compensate.sw.json") - - assert.Nil(t, err) - assert.NotNil(t, workflow) - b, err := json.Marshal(workflow) - assert.Nil(t, err) - - workflow = nil - err = json.Unmarshal(b, &workflow) - assert.Nil(t, err) - }) -} diff --git a/parser/testdata/applicationrequestfunctions.json b/parser/testdata/applicationrequestfunctions.json deleted file mode 100644 index bafc861..0000000 --- a/parser/testdata/applicationrequestfunctions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/application.json#emailRejection" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/applicationrequestretries.json b/parser/testdata/applicationrequestretries.json deleted file mode 100644 index 40f83b5..0000000 --- a/parser/testdata/applicationrequestretries.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/constantsDogs.json b/parser/testdata/constantsDogs.json deleted file mode 100644 index cd3f101..0000000 --- a/parser/testdata/constantsDogs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Translations": { - "Dog": { - "Serbian": "pas", - "Spanish": "perro", - "French": "chien" - } - } -} \ No newline at end of file diff --git a/parser/testdata/datainputschema.json b/parser/testdata/datainputschema.json deleted file mode 100644 index bace233..0000000 --- a/parser/testdata/datainputschema.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - } -} \ No newline at end of file diff --git a/parser/testdata/errors.json b/parser/testdata/errors.json deleted file mode 100644 index 099e14d..0000000 --- a/parser/testdata/errors.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventbasedgreetingevents.json b/parser/testdata/eventbasedgreetingevents.json deleted file mode 100644 index b63f2bf..0000000 --- a/parser/testdata/eventbasedgreetingevents.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/eventdefs.yml b/parser/testdata/eventdefs.yml deleted file mode 100644 index dd2c3b7..0000000 --- a/parser/testdata/eventdefs.yml +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -events: -- name: PaymentReceivedEvent - type: payment.receive - source: paymentEventSource - correlation: - - contextAttributeName: accountId -- name: ConfirmationCompletedEvent - type: payment.confirmation - kind: produced \ No newline at end of file diff --git a/parser/testdata/functiondefs.json b/parser/testdata/functiondefs.json deleted file mode 100644 index fc7dd94..0000000 --- a/parser/testdata/functiondefs.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "functions": [ - { - "name": "checkFundsAvailability", - "operation": "file://myapis/billingapis.json#checkFunds" - }, - { - "name": "sendSuccessEmail", - "operation": "file://myapis/emailapis.json#paymentSuccess" - }, - { - "name": "sendInsufficientFundsEmail", - "operation": "file://myapis/emailapis.json#paymentInsufficientFunds" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json deleted file mode 100644 index e5316d9..0000000 --- a/parser/testdata/secrets.json +++ /dev/null @@ -1,6 +0,0 @@ - -[ - "SECRET1", - "SECRET2", - "SECRET3" -] \ No newline at end of file diff --git a/parser/testdata/timeouts.json b/parser/testdata/timeouts.json deleted file mode 100644 index c3586bd..0000000 --- a/parser/testdata/timeouts.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } -} \ No newline at end of file diff --git a/parser/testdata/workflows/VetAppointmentWorkflow.json b/parser/testdata/workflows/VetAppointmentWorkflow.json deleted file mode 100644 index f6c0d43..0000000 --- a/parser/testdata/workflows/VetAppointmentWorkflow.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "id": "VetAppointmentWorkflow", - "name": "Vet Appointment Workflow", - "description": "Vet service call via events", - "version": "1.0", - "specVersion": "0.8", - "start": "MakeVetAppointmentState", - "events": [ - { - "name": "MakeVetAppointment", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "produced" - }, - { - "name": "VetAppointmentInfo", - "source": "VetServiceSource", - "type": "events.vet.appointments", - "kind": "consumed" - } - ], - "states": [ - { - "name": "MakeVetAppointmentState", - "type": "operation", - "actions": [ - { - "name": "MakeAppointmentAction", - "eventRef": { - "triggerEventRef": "MakeVetAppointment", - "data": "${ .patientInfo }", - "resultEventRef": "VetAppointmentInfo" - }, - "actionDataFilter": { - "results": "${ .appointmentInfo }" - } - } - ], - "timeouts": { - "actionExecTimeout": "PT15M" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/actiondata-defaultvalue.yaml b/parser/testdata/workflows/actiondata-defaultvalue.yaml deleted file mode 100644 index 6b1628d..0000000 --- a/parser/testdata/workflows/actiondata-defaultvalue.yaml +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - id: greetingId - name: Greet - type: operation - actions: - - name: greeting - functionRef: - refName: greetingFunction - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue103.json b/parser/testdata/workflows/applicationrequest-issue103.json deleted file mode 100644 index 48b71fc..0000000 --- a/parser/testdata/workflows/applicationrequest-issue103.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.7", - "auth": "./testdata/workflows/urifiles/auth.yaml", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml deleted file mode 100644 index 395ac8b..0000000 --- a/parser/testdata/workflows/applicationrequest-issue16.sw.yaml +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2021 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: applicantrequest -version: '1.0' -name: Applicant Request Decision Workflow -description: Determine if applicant request is valid -start: CheckApplication -specVersion: "0.8" -functions: - - name: sendRejectionEmailFunction - operation: http://myapis.org/applicationapi.json#emailRejection -states: - - name: CheckApplication - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: StartApplication - - condition: "${ .applicants | .age < 18 }" - transition: RejectApplication - defaultCondition: - transition: RejectApplication - - name: StartApplication - type: operation - actions: - - subFlowRef: - workflowId: startApplicationWorkflowId - end: true - - name: RejectApplication - type: operation - actionMode: sequential - actions: - - functionRef: - refName: sendRejectionEmailFunction - arguments: - applicant: "${ .applicant }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest-issue69.json b/parser/testdata/workflows/applicationrequest-issue69.json deleted file mode 100644 index 99b373c..0000000 --- a/parser/testdata/workflows/applicationrequest-issue69.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": "file://testdata/workflows/urifiles/auth.json", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json deleted file mode 100644 index 674532a..0000000 --- a/parser/testdata/workflows/applicationrequest.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "specVersion": "0.8", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": 
"${ .applicants | .age >= 18 }", - "transition": "StartApplication" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.multiauth.json b/parser/testdata/workflows/applicationrequest.multiauth.json deleted file mode 100644 index cd7391d..0000000 --- a/parser/testdata/workflows/applicationrequest.multiauth.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "metadata":{ - "metadata1": "metadata1", - "metadata2": "metadata2" - }, - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token", - "metadata":{ - "auth1": "auth1", - "auth2": "auth2" - } - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json deleted file mode 100644 index 309cf8f..0000000 --- a/parser/testdata/workflows/applicationrequest.rp.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "start": { - "stateName": "CheckApplication" - }, - "functions": "file://testdata/applicationrequestfunctions.json", - "retries": "file://testdata/applicationrequestretries.json", - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": 
"${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json deleted file mode 100644 index a410993..0000000 --- a/parser/testdata/workflows/applicationrequest.url.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "specVersion": "0.8", - "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestfunctions.json", - "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json", - "start": { - "stateName": "CheckApplication" - }, - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "defaultCondition": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkInbox.json b/parser/testdata/workflows/checkInbox.json deleted file mode 100644 index 0256a8e..0000000 --- a/parser/testdata/workflows/checkInbox.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "checkInbox", - "name": "Check Inbox Workflow", - "version": "1.0", - "specVersion": "0.8", - "description": "Periodically Check Inbox", - "start": { - "stateName": "CheckInbox", - "schedule": { - "cron": "0 0/15 * * * ?" 
- } - }, - "functions": [ - { - "name": "checkInboxFunction", - "operation": "http://myapis.org/inboxapi.json#checkNewMessages" - }, - { - "name": "sendTextFunction", - "operation": "http://myapis.org/inboxapi.json#sendText" - } - ], - "states": [ - { - "name": "CheckInbox", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": "checkInboxFunction" - } - ], - "transition": "SendTextForHighPriority" - }, - { - "name": "SendTextForHighPriority", - "type": "foreach", - "inputCollection": "${ .messages }", - "iterationParam": "singlemessage", - "actions": [ - { - "functionRef": { - "refName": "sendTextFunction", - "arguments": { - "message": "${ .singlemessage }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkcarvitals.json b/parser/testdata/workflows/checkcarvitals.json deleted file mode 100644 index a0f14ef..0000000 --- a/parser/testdata/workflows/checkcarvitals.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "id": "checkcarvitals", - "name": "Check Car Vitals Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "WhenCarIsOn", - "states": [ - { - "name": "WhenCarIsOn", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "CarTurnedOnEvent" - ] - } - ], - "transition": "DoCarVitalChecks" - }, - { - "name": "DoCarVitalChecks", - "type": "operation", - "actions": [ - { - "subFlowRef": "vitalscheck", - "sleep": { - "after": "PT1S" - } - } - ], - "transition": "CheckContinueVitalChecks" - }, - { - "name": "CheckContinueVitalChecks", - "type": "switch", - "eventConditions": [ - { - "name": "Car Turned Off Condition", - "eventRef": "CarTurnedOffEvent", - "end": true - } - ], - "defaultCondition": { - "transition": "DoCarVitalChecks" - } - } - ], - "events": [ - { - "name": "CarTurnedOnEvent", - "type": "car.events", - "source": "my/car" - }, - { - "name": "CarTurnedOffEvent", - "type": "car.events", - "source": "my/car" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml deleted file mode 100644 index 0729e80..0000000 --- a/parser/testdata/workflows/checkinbox.cron-test.sw.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: 0 0/15 * * * ? 
-functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: checkInboxFunction - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: true \ No newline at end of file diff --git a/parser/testdata/workflows/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml deleted file mode 100644 index e42d9a2..0000000 --- a/parser/testdata/workflows/checkinbox.sw.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: checkInbox -name: Check Inbox Workflow -description: Periodically Check Inbox -version: '1.0' -specVersion: "0.8" -start: - stateName: CheckInbox - schedule: - cron: - expression: 0 0/15 * * * ? -functions: - - name: checkInboxFunction - operation: http://myapis.org/inboxapi.json#checkNewMessages - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText -states: - - name: CheckInbox - type: operation - actionMode: sequential - actions: - - functionRef: - refName: checkInboxFunction - transition: - nextState: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - iterationParam: singlemessage - actions: - - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/compensate.sw.json b/parser/testdata/workflows/compensate.sw.json deleted file mode 100644 index 9f6ab1f..0000000 --- a/parser/testdata/workflows/compensate.sw.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "id": "compensation", - "version": "1.0", - "name": "Workflow Error example", - "description": "An example of how compensation works", - "specVersion": "0.8", - "start": "printStatus", - "functions": [ - { - "name": "PrintOutput", - "type": "custom", - "operation": "sysout" - } - ], - "states": [ - { - "name": "printStatus", - "type": "inject", - "data": { - "compensated": false - }, - "compensatedBy": "compensating", - "transition": "branch" - }, - { - "name": "branch", - "type": "switch", - "dataConditions": [ - { - "condition": ".shouldCompensate==true", - "transition": { - "nextState": "finish_compensate", - "compensate": true - } - }, - { - "condition": ".shouldCompensate==false", - "transition": { - "nextState": "finish_not_compensate", - "compensate": false - } - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "compensating", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensated": true - }, - "transition": "compensating_more" - }, - { - "name": 
"compensating_more", - "usedForCompensation": true, - "type": "inject", - "data": { - "compensating_more": "Real Betis Balompie" - }, - "end": true - }, - { - "name": "finish_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - }, - { - "name": "finish_not_compensate", - "type": "operation", - "actions": [ - { - "name": "finish_not_compensate_sysout", - "functionRef": { - "refName": "PrintOutput", - "arguments": { - "message": "completed" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/conditionbasedstate.yaml b/parser/testdata/workflows/conditionbasedstate.yaml deleted file mode 100644 index f42b56d..0000000 --- a/parser/testdata/workflows/conditionbasedstate.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - condition: "${ .applicants | .age < 18 }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/continue-as-example.yaml b/parser/testdata/workflows/continue-as-example.yaml deleted file mode 100644 index b5957f5..0000000 --- a/parser/testdata/workflows/continue-as-example.yaml +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: notifycustomerworkflow -name: Notify Customer -version: '1.0' -specVersion: '0.8' -start: WaitForCustomerEvent -states: - - name: WaitForCustomerEvent - type: event - onEvents: - - eventRefs: - - CustomerEvent - eventDataFilter: - data: "${ .customerId }" - toStateData: "${ .eventCustomerId }" - actions: - - functionRef: - refName: NotifyCustomerFunction - arguments: - customerId: "${ .eventCustomerId }" - stateDataFilter: - output: "${ .count = .count + 1 }" - transition: CheckEventQuota - - name: CheckEventQuota - type: switch - dataConditions: - - condition: "${ try(.customerCount) != null and .customerCount > .quota.maxConsumedEvents}" - end: - continueAs: - workflowId: notifycustomerworkflow - version: '1.0' - data: "${ del(.customerCount) }" - workflowExecTimeout: - duration: "PT1H" - runBefore: "GenerateReport" - interrupt: true - defaultCondition: - transition: WaitForCustomerEvent -events: - - name: CustomerEvent - type: org.events.customerEvent - source: customerSource -functions: - - name: NotifyCustomerFunction - operation: http://myapis.org/customerapis.json#notifyCustomer \ No newline at end of file diff --git a/parser/testdata/workflows/customerbankingtransactions.json b/parser/testdata/workflows/customerbankingtransactions.json deleted file mode 100644 index 98fbd34..0000000 --- a/parser/testdata/workflows/customerbankingtransactions.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "id": "customerbankingtransactions", - "name": "Customer Banking Transactions Workflow", - "version": "1.0", - "specVersion": "0.8", - "autoRetries": true, - "constants": { - "largetxamount": 5000 - }, - "states": [ - { - "name": "ProcessTransactions", - "type": "foreach", - "inputCollection": "${ .customer.transactions }", - "iterationParam": "${ .tx }", - "actions": [ - { - "name": "Process Larger Transaction", - "functionRef": "Banking Service - Larger Tx", - "condition": "${ .tx >= $CONST.largetxamount }" - }, - { - "name": "Process Smaller Transaction", - "functionRef": "Banking Service - Smaller Tx", - "condition": "${ .tx < $CONST.largetxamount }" - } - ], - "end": true - } - ], - "functions": [ - { - "name": "Banking Service - Larger Tx", - "type": "asyncapi", - "operation": "banking.yaml#largerTransation" - }, - { - "name": "Banking Service - Smaller Tx", - "type": "asyncapi", - "operation": "banking.yaml#smallerTransation" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/customercreditcheck.json b/parser/testdata/workflows/customercreditcheck.json deleted file mode 100644 index 8a3914f..0000000 --- a/parser/testdata/workflows/customercreditcheck.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "customercreditcheck", - "version": "1.0", - "specVersion": "0.8", - "name": "Customer Credit Check Workflow", - "description": "Perform Customer Credit Check", - "start": "CheckCredit", - "functions": [ - { - "name": "creditCheckFunction", - "operation": "http://myapis.org/creditcheckapi.json#doCreditCheck" - }, - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/creditcheckapi.json#rejectionEmail" - }, - { - "name": "callCreditCheckMicroservice", - "operation": "http://myapis.org/creditcheckapi.json#creditCheckMicroservice" - } - ], - "events": [ - { - "name": "CreditCheckCompletedEvent", - "type": "creditCheckCompleteType", - "source": "creditCheckSource", - "correlation": [ - { - "contextAttributeName": "customerId" - } - ] - } - ], - "states": [ - { - "name": "CheckCredit", - "type": "callback", - "action": { - "functionRef": { - "refName": 
"callCreditCheckMicroservice", - "arguments": { - "customer": "${ .customer }" - } - } - }, - "eventRef": "CreditCheckCompletedEvent", - "timeouts": { - "stateExecTimeout": "PT15M" - }, - "transition": "EvaluateDecision" - }, - { - "name": "EvaluateDecision", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .creditCheck | .decision == \"Approved\" }", - "transition": "StartApplication" - }, - { - "condition": "${ .creditCheck | .decision == \"Denied\" }", - "transition": "RejectApplication" - } - ], - "defaultCondition": { - "transition": "RejectApplication" - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": "startApplicationWorkflowId" - } - ], - "end": true - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/dataInputSchemaObject.json b/parser/testdata/workflows/dataInputSchemaObject.json deleted file mode 100644 index 7b50c0d..0000000 --- a/parser/testdata/workflows/dataInputSchemaObject.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "id": "greeting", - "version": "1.0.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "dataInputSchema": { - "failOnValidationErrors": false, - "schema": { - "title": "Hello World Schema", - "properties": { - "person": { - "type": "object", - "properties": { - "name": { - "type": "string" - } - }, - "required": [ - "name" - ] - } - }, - "required": [ - "person" - ] - } - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ {greeting: .greeting} }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbaseddataandswitch.sw.json b/parser/testdata/workflows/eventbaseddataandswitch.sw.json deleted file mode 100644 index bdf80d6..0000000 --- a/parser/testdata/workflows/eventbaseddataandswitch.sw.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "id": "eventbaseddataandswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions with Event Database Condition", - "specVersion": "0.8", - "start": { - "stateName": "Start" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "Start", - "type": "switch", - "dataConditions": [ - { - "condition": "${ true }", - "transition": "CheckVisaStatus" - } - ], - "defaultCondition": { - "transition": { - "nextState": "CheckVisaStatus" - } - } - }, - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" 
- } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json deleted file mode 100644 index 3510d11..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "start": "Greet", - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .greet }", - "toStateData": "${ .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet.name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload.greeting }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json deleted file mode 100644 index 589ad36..0000000 --- a/parser/testdata/workflows/eventbasedgreeting.sw.p.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "eventbasedgreeting", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": "file://testdata/eventbasedgreetingevents.json", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json deleted file mode 100644 index 80e81b0..0000000 --- a/parser/testdata/workflows/eventbasedgreetingexclusive.sw.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "id": "eventbasedgreetingexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": 
"greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - }, - { - "name": "greetingFunction2", - "operation": "file://myapis/greetingapis.json#greeting2" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - }, - { - "eventRefs": [ - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "{{ $.data.greet2 }}" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction2", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json b/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json deleted file mode 100644 index 946aa39..0000000 --- a/parser/testdata/workflows/eventbasedgreetingnonexclusive.sw.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "eventbasedgreetingnonexclusive", - "version": "1.0", - "name": "Event Based Greeting Workflow", - "description": "Event Based Greeting", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "events": [ - { - "name": "GreetingEvent", - "type": "greetingEventType", - "source": "greetingEventSource" - }, - { - "name": "GreetingEvent2", - "type": "greetingEventType2", - "source": "greetingEventSource2" - } - ], - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "GreetingEvent", - "GreetingEvent2" - ], - "eventDataFilter": { - "data": "${ .data | .greet }" - }, - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .greet | .name }" - } - } - } - ] - } - ], - "stateDataFilter": { - "output": "${ .payload | .greeting }" - }, - "end": { - "terminate": true - } - } - ] -} diff --git a/parser/testdata/workflows/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json deleted file mode 100644 index 3d0075f..0000000 --- a/parser/testdata/workflows/eventbasedswitch.sw.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "id": "eventbasedswitch", - "version": "1.0", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "specVersion": "0.8", - "start": { - "stateName": "CheckVisaStatus" - }, - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": { - "nextState": "HandleApprovedVisa" - } - }, - { - "eventRef": "visaRejectedEvent", - "transition": { - "nextState": "HandleRejectedVisa" - } - } - ], - "timeouts": { - "eventTimeout": "PT1H" - }, - "defaultCondition": { - "transition": { - "nextState": "HandleNoVisaDecision" - } - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": 
"handleApprovedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleRejectedVisaWorkflowID" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleNoVisaDecisionWorkfowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/eventbasedswitchstate.json b/parser/testdata/workflows/eventbasedswitchstate.json deleted file mode 100644 index c1b48b0..0000000 --- a/parser/testdata/workflows/eventbasedswitchstate.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "id": "eventbasedswitchstate", - "version": "1.0", - "specVersion": "0.8", - "name": "Event Based Switch Transitions", - "description": "Event Based Switch Transitions", - "start": "CheckVisaStatus", - "events": [ - { - "name": "visaApprovedEvent", - "type": "VisaApproved", - "source": "visaCheckSource" - }, - { - "name": "visaRejectedEvent", - "type": "VisaRejected", - "source": "visaCheckSource" - } - ], - "states": [ - { - "name": "CheckVisaStatus", - "type": "switch", - "eventConditions": [ - { - "eventRef": "visaApprovedEvent", - "transition": "HandleApprovedVisa" - }, - { - "eventRef": "visaRejectedEvent", - "transition": "HandleRejectedVisa" - } - ], - "eventTimeout": "PT1H", - "defaultCondition": { - "transition": "HandleNoVisaDecision" - } - }, - { - "name": "HandleApprovedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleApprovedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": "HandleRejectedVisa", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleRejectedVisaWorkflowID" - } - ], - "end": true - }, - { - "name": "HandleNoVisaDecision", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleNoVisaDecisionWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/fillglassofwater.json b/parser/testdata/workflows/fillglassofwater.json deleted file mode 100644 index b45d84e..0000000 --- a/parser/testdata/workflows/fillglassofwater.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "id": "fillglassofwater", - "name": "Fill glass of water workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "Check if full", - "functions": [ - { - "name": "Increment Current Count Function", - "type": "expression", - "operation": ".counts.current += 1 | .counts.current" - } - ], - "states": [ - { - "name": "Check if full", - "type": "switch", - "dataConditions": [ - { - "name": "Need to fill more", - "condition": "${ .counts.current < .counts.max }", - "transition": "Add Water" - }, - { - "name": "Glass full", - "condition": ".counts.current >= .counts.max", - "end": true - } - ], - "defaultCondition": { - "end": true - } - }, - { - "name": "Add Water", - "type": "operation", - "actions": [ - { - "functionRef": "Increment Current Count Function", - "actionDataFilter": { - "toStateData": ".counts.current" - } - } - ], - "transition": "Check if full" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/finalizeCollegeApplication.json b/parser/testdata/workflows/finalizeCollegeApplication.json deleted file mode 100644 index 9c93616..0000000 --- a/parser/testdata/workflows/finalizeCollegeApplication.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "id": "finalizeCollegeApplication", - "name": "Finalize College 
Application", - "version": "1.0", - "specVersion": "0.8", - "start": "FinalizeApplication", - "events": [ - { - "name": "ApplicationSubmitted", - "type": "org.application.submitted", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "SATScoresReceived", - "type": "org.application.satscores", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - }, - { - "name": "RecommendationLetterReceived", - "type": "org.application.recommendationLetter", - "source": "applicationsource", - "correlation": [ - { - "contextAttributeName": "applicantId" - } - ] - } - ], - "functions": [ - { - "name": "finalizeApplicationFunction", - "operation": "http://myapis.org/collegeapplicationapi.json#finalize" - } - ], - "states": [ - { - "name": "FinalizeApplication", - "type": "event", - "exclusive": false, - "onEvents": [ - { - "eventRefs": [ - "ApplicationSubmitted", - "SATScoresReceived", - "RecommendationLetterReceived" - ], - "actions": [ - { - "functionRef": { - "refName": "finalizeApplicationFunction", - "arguments": { - "student": "${ .applicantId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml deleted file mode 100644 index 00f04f3..0000000 --- a/parser/testdata/workflows/greetings-constants-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -constants: "file://testdata/constantsDogs.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml deleted file mode 100644 index 27d00e1..0000000 --- a/parser/testdata/workflows/greetings-secret-file.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: "file://testdata/secrets.json" -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .SECRET1 }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml deleted file mode 100644 index 2f64a98..0000000 --- a/parser/testdata/workflows/greetings-secret.sw.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -secrets: - - NAME -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .SECRETS | .NAME }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-v08-spec.sw.yaml b/parser/testdata/workflows/greetings-v08-spec.sw.yaml deleted file mode 100644 index 015a711..0000000 --- a/parser/testdata/workflows/greetings-v08-spec.sw.yaml +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -id: custom.greeting -version: '1.0' -specVersion: "0.8" -description: Greet Someone -# name: Greeting example #can be empty -# start: Greet #can be empty -functions: - - name: greetingCustomFunction - operation: /path/to/my/script/greeting.ts#CustomGreeting - # Support custom function type definition - type: custom - - name: sendTextFunction - operation: http://myapis.org/inboxapi.json#sendText - type: graphql - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting - - name: StoreBidFunction - operation: http://myapis.org/inboxapi.json#storeBidFunction - - name: callCreditCheckMicroservice - operation: http://myapis.org/inboxapi.json#callCreditCheckMicroservice -events: - - name: StoreBidFunction - type: StoreBidFunction - source: StoreBidFunction - - name: CarBidEvent - type: typeCarBidEvent - source: sourceCarBidEvent - - name: visaApprovedEventRef - type: typeVisaApprovedEventRef - source: sourceVisaApprovedEventRef - - name: visaRejectedEvent - type: typeVisaRejectedEvent - source: sourceVisaRejectedEvent -states: - - name: GreetDelay - type: delay - timeDelay: PT5S - transition: - nextState: StoreCarAuctionBid - - name: StoreCarAuctionBid - type: event - exclusive: true - onEvents: - - eventRefs: - - CarBidEvent - eventDataFilter: - useData: true - data: "test" - toStateData: "testing" - actionMode: parallel - actions: - - functionRef: - refName: StoreBidFunction - arguments: - bid: "${ .bid }" - name: funcref1 - - eventRef: - triggerEventRef: StoreBidFunction - data: "${ .patientInfo }" - resultEventRef: StoreBidFunction - contextAttributes: - customer: "${ .customer }" - time: 48 - name: eventRefName - timeouts: - eventTimeout: PT1H - actionExecTimeout: PT3S - stateExecTimeout: - total: PT1S - single: PT2S - transition: ParallelExec - - name: ParallelExec - type: parallel - completionType: atLeast - branches: - - name: ShortDelayBranch - actions: - - subFlowRef: shortdelayworkflowid - timeouts: - actionExecTimeout: "PT5H" - branchExecTimeout: "PT6M" - - name: LongDelayBranch - actions: - - subFlowRef: longdelayworkflowid - timeouts: - branchExecTimeout: "PT6M" - stateExecTimeout: - total: PT1S - single: PT2S - numCompleted: 13 - transition: CheckVisaStatusSwitchEventBased - - name: CheckVisaStatusSwitchEventBased - type: switch - eventConditions: - - name: visaApprovedEvent - eventRef: visaApprovedEventRef - transition: HandleApprovedVisa - metadata: - visa: allowed - mastercard: disallowed - - eventRef: visaRejectedEvent - transition: HandleRejectedVisa - metadata: - test: tested - timeouts: - eventTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - defaultCondition: - transition: HandleNoVisaDecision - - name: CheckApplicationSwitchDataBased - type: switch - dataConditions: - - condition: "${ .applicants | .age >= 18 }" - transition: - nextState: StartApplication - defaultCondition: - transition: RejectApplication - timeouts: - stateExecTimeout: - total: PT1S - single: PT2S - - name: GreetSequential - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - dataResultsPath: "${ .payload | .greeting }" - timeouts: - actionExecTimeout: PT1H - stateExecTimeout: - total: PT1S - single: PT2S - stateDataFilter: - dataOutputPath: "${ .greeting }" - transition: SendTextForHighPriority - - name: SendTextForHighPriority - type: foreach - inputCollection: "${ .messages }" - outputCollection: "${ .outputMessages 
}" - iterationParam: "${ .this }" - batchSize: 45 - mode: sequential - actions: - - name: test - functionRef: - refName: sendTextFunction - arguments: - message: "${ .singlemessage }" - timeouts: - actionExecTimeout: PT11H - stateExecTimeout: - total: PT11S - single: PT22S - transition: HelloInject - - name: HelloInject - type: inject - data: - result: Hello World, last state! - boolValue: false - timeouts: - stateExecTimeout: - total: PT11M - single: PT22M - transition: CheckCreditCallback - - name: CheckCreditCallback - type: callback - action: - functionRef: - refName: callCreditCheckMicroservice - arguments: - customer: "${ .customer }" - argsObj: { - "name" : "hi", - "age": 10 - } - time: 48 - sleep: - before: PT10S - after: PT20S - eventRef: CreditCheckCompletedEvent - eventDataFilter: - useData: true - data: "test data" - toStateData: "${ .customer }" - timeouts: - actionExecTimeout: PT150M - eventTimeout: PT34S - stateExecTimeout: - total: PT115M - single: PT22M - transition: WaitForCompletionSleep - - name: WaitForCompletionSleep - type: sleep - duration: PT5S - timeouts: - stateExecTimeout: - total: PT100S - single: PT200S - end: - terminate: true - - name: HelloStateWithDefaultConditionString - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: RejectApplication - type: switch - dataConditions: - - condition: ${ true } - transition: HandleApprovedVisa - - condition: ${ false } - transition: - nextState: HandleRejectedVisa - defaultCondition: SendTextForHighPriority - end: true - - name: HandleNoVisaDecision - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: StartApplication - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleApprovedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true - - name: HandleRejectedVisa - type: operation - actionMode: sequential - actions: - - name: greetingCustomFunction - functionRef: - refName: greetingCustomFunction - end: true diff --git a/parser/testdata/workflows/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json deleted file mode 100644 index 8adeeb6..0000000 --- a/parser/testdata/workflows/greetings.sw.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "specVersion": "0.8", - "name": "Greeting Workflow", - "description": "Greet Someone", - "start": "Greet", - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "arguments": { - "name": "${ .person.name }" - } - }, - "actionDataFilter": { - "results": "${ .greeting }" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml deleted file mode 100644 index 8f5447b..0000000 --- a/parser/testdata/workflows/greetings.sw.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2020 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -id: greeting -version: '1.0' -name: Greeting Workflow -description: Greet Someone -specVersion: "0.8" -start: - stateName: Greet -functions: - - name: greetingFunction - operation: file://myapis/greetingapis.json#greeting -states: - - name: Greet - id: idx - type: operation - actionMode: sequential - actions: - - functionRef: - refName: greetingFunction - parameters: - name: "${ .greet | .name }" - actionDataFilter: - toStateData: "${ .payload | .greeting }" - stateDataFilter: - dataOutputPath: "${ .greeting }" - end: - terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings_sleep.sw.json b/parser/testdata/workflows/greetings_sleep.sw.json deleted file mode 100644 index 9a434d4..0000000 --- a/parser/testdata/workflows/greetings_sleep.sw.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "greeting", - "version": "1.0", - "name": "Greeting Workflow", - "description": "Greet Someone", - "specVersion": "0.8", - "start": { - "stateName": "Greet" - }, - "functions": [ - { - "name": "greetingFunction", - "operation": "file://myapis/greetingapis.json#greeting" - } - ], - "states": [ - { - "name": "SleepHere", - "type": "sleep", - "timeouts": { - "stateExecTimeout": "PT10S" - }, - "duration": "PT40S", - "transition": "Greet" - }, - { - "name": "Greet", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "greetingFunction", - "parameters": { - "name": "${ .person | .name }" - } - }, - "actionDataFilter": { - "toStateData": "${ .greeting }" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/handleCarAuctionBid.json b/parser/testdata/workflows/handleCarAuctionBid.json deleted file mode 100644 index 6df46b0..0000000 --- a/parser/testdata/workflows/handleCarAuctionBid.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "id": "handleCarAuctionBid", - "version": "1.0", - "specVersion": "0.8", - "name": "Car Auction Bidding Workflow", - "description": "Store a single bid whole the car auction is active", - "start": { - "stateName": "StoreCarAuctionBid", - "schedule": "R/PT2H" - }, - "functions": [ - { - "name": "StoreBidFunction", - "operation": "http://myapis.org/carauctionapi.json#storeBid" - } - ], - "events": [ - { - "name": "CarBidEvent", - "type": "carBidMadeType", - "source": "carBidEventSource" - } - ], - "states": [ - { - "name": "StoreCarAuctionBid", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "CarBidEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "StoreBidFunction", - "arguments": { - "bid": "${ .bid }" - } - } - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/helloworld.json b/parser/testdata/workflows/helloworld.json deleted file mode 100644 index 707b6ef..0000000 --- a/parser/testdata/workflows/helloworld.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "id": "helloworld", - "version": "1.0", - "specVersion": "0.8", - "name": "Hello World Workflow", - "description": "Inject Hello 
World", - "start": "Hello State", - "states": [ - { - "name": "Hello State", - "type": "inject", - "data": { - "result": "Hello World!" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/jobmonitoring.json b/parser/testdata/workflows/jobmonitoring.json deleted file mode 100644 index a11282b..0000000 --- a/parser/testdata/workflows/jobmonitoring.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "id": "jobmonitoring", - "version": "1.0", - "specVersion": "0.8", - "name": "Job Monitoring", - "description": "Monitor finished execution of a submitted job", - "start": "SubmitJob", - "functions": [ - { - "name": "submitJob", - "operation": "http://myapis.org/monitorapi.json#doSubmit" - }, - { - "name": "checkJobStatus", - "operation": "http://myapis.org/monitorapi.json#checkStatus" - }, - { - "name": "reportJobSuceeded", - "operation": "http://myapis.org/monitorapi.json#reportSucceeded" - }, - { - "name": "reportJobFailed", - "operation": "http://myapis.org/monitorapi.json#reportFailure" - } - ], - "states": [ - { - "name": "SubmitJob", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "submitJob", - "arguments": { - "name": "${ .job.name }" - } - }, - "actionDataFilter": { - "results": "${ .jobuid }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobuid }" - }, - "transition": "WaitForCompletion" - }, - { - "name": "WaitForCompletion", - "type": "sleep", - "duration": "PT5S", - "transition": "GetJobStatus" - }, - { - "name": "GetJobStatus", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "checkJobStatus", - "arguments": { - "name": "${ .jobuid }" - } - }, - "actionDataFilter": { - "results": "${ .jobstatus }" - } - } - ], - "stateDataFilter": { - "output": "${ .jobstatus }" - }, - "transition": "DetermineCompletion" - }, - { - "name": "DetermineCompletion", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .jobStatus == \"SUCCEEDED\" }", - "transition": "JobSucceeded" - }, - { - "condition": "${ .jobStatus == \"FAILED\" }", - "transition": "JobFailed" - } - ], - "defaultCondition": { - "transition": "WaitForCompletion" - } - }, - { - "name": "JobSucceeded", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobSuceeded", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - }, - { - "name": "JobFailed", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "reportJobFailed", - "arguments": { - "name": "${ .jobuid }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/onboardcustomer.json b/parser/testdata/workflows/onboardcustomer.json deleted file mode 100644 index 85cb0d6..0000000 --- a/parser/testdata/workflows/onboardcustomer.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "id": "onboardcustomer", - "version": "1.0", - "specVersion": "0.8", - "name": "Onboard Customer", - "description": "Onboard a Customer", - "start": "Onboard", - "states": [ - { - "name": "Onboard", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "invoke": "async", - "onParentComplete": "continue", - "workflowId": "customeronboardingworkflow", - "version": "1.0" - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/parallelexec.json b/parser/testdata/workflows/parallelexec.json deleted file mode 100644 index 
7e33893..0000000 --- a/parser/testdata/workflows/parallelexec.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "id": "parallelexec", - "version": "1.0", - "specVersion": "0.8", - "name": "Parallel Execution Workflow", - "description": "Executes two branches in parallel", - "start": "ParallelExec", - "states": [ - { - "name": "ParallelExec", - "type": "parallel", - "completionType": "allOf", - "branches": [ - { - "name": "ShortDelayBranch", - "actions": [ - { - "subFlowRef": "shortdelayworkflowid" - } - ] - }, - { - "name": "LongDelayBranch", - "actions": [ - { - "subFlowRef": "longdelayworkflowid" - } - ] - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientVitalsWorkflow.json b/parser/testdata/workflows/patientVitalsWorkflow.json deleted file mode 100644 index a4fd8b5..0000000 --- a/parser/testdata/workflows/patientVitalsWorkflow.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "id": "patientVitalsWorkflow", - "name": "Monitor Patient Vitals", - "version": "1.0", - "specVersion": "0.8", - "start": "MonitorVitals", - "events": [ - { - "name": "HighBodyTemperature", - "type": "org.monitor.highBodyTemp", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighBloodPressure", - "type": "org.monitor.highBloodPressure", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - }, - { - "name": "HighRespirationRate", - "type": "org.monitor.highRespirationRate", - "source": "monitoringSource", - "correlation": [ - { - "contextAttributeName": "patientId" - } - ] - } - ], - "functions": [ - { - "name": "callPulmonologist", - "operation": "http://myapis.org/patientapis.json#callPulmonologist" - }, - { - "name": "sendTylenolOrder", - "operation": "http://myapis.org/patientapis.json#tylenolOrder" - }, - { - "name": "callNurse", - "operation": "http://myapis.org/patientapis.json#callNurse" - } - ], - "states": [ - { - "name": "MonitorVitals", - "type": "event", - "exclusive": true, - "onEvents": [ - { - "eventRefs": [ - "HighBodyTemperature" - ], - "actions": [ - { - "functionRef": { - "refName": "sendTylenolOrder", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighBloodPressure" - ], - "actions": [ - { - "functionRef": { - "refName": "callNurse", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - }, - { - "eventRefs": [ - "HighRespirationRate" - ], - "actions": [ - { - "functionRef": { - "refName": "callPulmonologist", - "arguments": { - "patientid": "${ .patientId }" - } - } - } - ] - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml deleted file mode 100644 index 6ceb1a1..0000000 --- a/parser/testdata/workflows/patientonboarding.sw.yaml +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2022 The Serverless Workflow Specification Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -id: patientonboarding -name: Patient Onboarding Workflow -version: "1.0" -specVersion: "0.8" -start: Onboard -states: - - name: Onboard - type: event - onEvents: - - eventRefs: - - NewPatientEvent - actions: - - functionRef: StorePatient - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: AssignDoctor - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - - functionRef: ScheduleAppt - retryRef: ServicesNotAvailableRetryStrategy - retryableErrors: - - ServiceNotAvailable - onErrors: - - errorRef: ServiceNotAvailable - end: true - end: true -events: - - name: NewPatientEvent - type: new.patients.event - source: newpatient/+ -functions: - - name: StorePatient - operation: api/services.json#storePatient - - name: StoreNewPatientInfo - operation: api/services.json#addPatient - - name: AssignDoctor - operation: api/services.json#assignDoctor - - name: ScheduleAppt - operation: api/services.json#scheduleAppointment -errors: - - name: ServiceNotAvailable - code: "503" -retries: - - name: ServicesNotAvailableRetryStrategy - delay: PT3S - maxAttempts: 10 - jitter: 0.0 - multiplier: 1.1 \ No newline at end of file diff --git a/parser/testdata/workflows/paymentconfirmation.json b/parser/testdata/workflows/paymentconfirmation.json deleted file mode 100644 index 815a73c..0000000 --- a/parser/testdata/workflows/paymentconfirmation.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "paymentconfirmation", - "version": "1.0", - "specVersion": "0.8", - "name": "Payment Confirmation Workflow", - "description": "Performs Payment Confirmation", - "functions": "file://functiondefs.json", - "events": "file://eventdefs.yml", - "states": [ - { - "name": "PaymentReceived", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "PaymentReceivedEvent" - ], - "actions": [ - { - "name": "checkfunds", - "functionRef": { - "refName": "checkFundsAvailability", - "arguments": { - "account": "${ .accountId }", - "paymentamount": "${ .payment.amount }" - } - } - } - ] - } - ], - "transition": "ConfirmBasedOnFunds" - }, - { - "name": "ConfirmBasedOnFunds", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .funds | .available == \"true\" }", - "transition": "SendPaymentSuccess" - }, - { - "condition": "${ .funds | .available == \"false\" }", - "transition": "SendInsufficientResults" - } - ], - "defaultCondition": { - "transition": "SendPaymentSuccess" - } - }, - { - "name": "SendPaymentSuccess", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendSuccessEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - }, - { - "name": "SendInsufficientResults", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "sendInsufficientFundsEmail", - "arguments": { - "applicant": "${ .customer }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "ConfirmationCompletedEvent", - "data": "${ .payment }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json deleted file mode 100644 index 7496b32..0000000 --- a/parser/testdata/workflows/provisionorders.sw.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "id": "provisionorders", - 
"version": "1.0", - "specVersion": "0.8", - "name": "Provision Orders", - "description": "Provision Orders and handle errors thrown", - "start": "ProvisionOrder", - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], - "errors": [ - { - "name": "Missing order id" - }, - { - "name": "Missing order item" - }, - { - "name": "Missing order quantity" - } - ], - "states": [ - { - "name": "ProvisionOrder", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .order }" - } - } - } - ], - "stateDataFilter": { - "output": "${ .exceptions }" - }, - "transition": "ApplyOrder", - "onErrors": [ - { - "errorRef": "Missing order id", - "transition": "MissingId" - }, - { - "errorRef": "Missing order item", - "transition": "MissingItem" - }, - { - "errorRef": "Missing order quantity", - "transition": "MissingQuantity" - } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingIdExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingItemExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ - { - "subFlowRef": "handleMissingQuantityExceptionWorkflow" - } - ], - "end": true - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ - { - "subFlowRef": "applyOrderWorkflowId" - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json deleted file mode 100644 index 2596b04..0000000 --- a/parser/testdata/workflows/purchaseorderworkflow.sw.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "id": "order", - "name": "Purchase Order Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "StartNewOrder", - "timeouts": { - "workflowExecTimeout": { - "duration": "P30D", - "runBefore": "CancelOrder" - } - }, - "states": [ - { - "name": "StartNewOrder", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderCreatedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogNewOrderCreated" - } - } - ] - } - ], - "transition": { - "nextState": "WaitForOrderConfirmation" - } - }, - { - "name": "WaitForOrderConfirmation", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "OrderConfirmedEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderConfirmed" - } - } - ] - } - ], - "transition": { - "nextState": "WaitOrderShipped" - } - }, - { - "name": "WaitOrderShipped", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "ShipmentSentEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogOrderShipped" - } - } - ] - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderFinishedEvent" - } - ] - } - }, - { - "name": "CancelOrder", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "CancelOrder" - } - } - ], - "end": { - "terminate": true, - "produceEvents": [ - { - "eventRef": "OrderCancelledEvent" - } - ] - } - } - ], - "events": [ - { - "name": "OrderCreatedEvent", - "type": "my.company.orders", - "source": "/orders/new", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderConfirmedEvent", - "type": "my.company.orders", - "source": 
"/orders/confirmed", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "ShipmentSentEvent", - "type": "my.company.orders", - "source": "/orders/shipped", - "correlation": [ - { - "contextAttributeName": "orderid" - } - ] - }, - { - "name": "OrderFinishedEvent", - "type": "my.company.orders", - "kind": "produced" - }, - { - "name": "OrderCancelledEvent", - "type": "my.company.orders", - "kind": "produced" - } - ], - "functions": [ - { - "name": "LogNewOrderCreated", - "operation": "http.myorg.io/ordersservices.json#logcreated" - }, - { - "name": "LogOrderConfirmed", - "operation": "http.myorg.io/ordersservices.json#logconfirmed" - }, - { - "name": "LogOrderShipped", - "operation": "http.myorg.io/ordersservices.json#logshipped" - }, - { - "name": "CancelOrder", - "operation": "http.myorg.io/ordersservices.json#calcelorder" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json deleted file mode 100644 index 9040643..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": "file://testdata/timeouts.json", - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": ["TemperatureEvent", "HumidityEvent"], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - "terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json deleted file mode 100644 index 90c7c62..0000000 --- a/parser/testdata/workflows/roomreadings.timeouts.sw.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "id": "roomreadings", - "name": "Room Temp and Humidity Workflow", - "version": "1.0", - "specVersion": "0.8", - "start": "ConsumeReading", - "timeouts": { - "workflowExecTimeout": { - "duration": "PT1H", - "runBefore": "GenerateReport" - } - }, - "keepActive": true, - "states": [ - { - "name": "ConsumeReading", - "type": "event", - "onEvents": [ - { - "eventRefs": [ - "TemperatureEvent", - "HumidityEvent" - ], - "actions": [ - { - "functionRef": { - "refName": "LogReading" - } - } - ], - "eventDataFilter": { - "toStateData": "${ .readings }" - } - } - ], - "end": true - }, - { - "name": "GenerateReport", - "type": "operation", - "actions": [ - { - "functionRef": { - "refName": "ProduceReport", - "arguments": { - "data": "${ .readings }" - } - } - } - ], - "end": { - 
"terminate": true - } - } - ], - "events": [ - { - "name": "TemperatureEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - }, - { - "name": "HumidityEvent", - "type": "my.home.sensors", - "source": "/home/rooms/+", - "correlation": [ - { - "contextAttributeName": "roomId" - } - ] - } - ], - "functions": [ - { - "name": "LogReading", - "operation": "http.myorg.io/ordersservices.json#logreading" - }, - { - "name": "ProduceReport", - "operation": "http.myorg.io/ordersservices.json#produceReport" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcloudeventonprovision.json b/parser/testdata/workflows/sendcloudeventonprovision.json deleted file mode 100644 index 7e5bc37..0000000 --- a/parser/testdata/workflows/sendcloudeventonprovision.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "id": "sendcloudeventonprovision", - "version": "1.0", - "specVersion": "0.8", - "name": "Send CloudEvent on provision completion", - "start": "ProvisionOrdersState", - "events": [ - { - "name": "provisioningCompleteEvent", - "type": "provisionCompleteType", - "kind": "produced" - } - ], - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioning.json#doProvision" - } - ], - "states": [ - { - "name": "ProvisionOrdersState", - "type": "foreach", - "inputCollection": "${ .orders }", - "iterationParam": "singleorder", - "outputCollection": "${ .provisionedOrders }", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "arguments": { - "order": "${ .singleorder }" - } - } - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "provisioningCompleteEvent", - "data": "${ .provisionedOrders }" - } - ] - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/sendcustomeremail.json b/parser/testdata/workflows/sendcustomeremail.json deleted file mode 100644 index 7e8d010..0000000 --- a/parser/testdata/workflows/sendcustomeremail.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id": "sendcustomeremail", - "version": "1.0", - "specVersion": "0.8", - "name": "Send customer email workflow", - "description": "Send email to a customer", - "start": "Send Email", - "functions": [ - { - "name": "emailFunction", - "operation": "file://myapis/emailapis.json#sendEmail" - } - ], - "states": [ - { - "name": "Send Email", - "type": "operation", - "actions": [ - { - "functionRef": { - "invoke": "async", - "refName": "emailFunction", - "arguments": { - "customer": "${ .customer }" - } - } - } - ], - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/solvemathproblems.json b/parser/testdata/workflows/solvemathproblems.json deleted file mode 100644 index a3083d0..0000000 --- a/parser/testdata/workflows/solvemathproblems.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "id": "solvemathproblems", - "version": "1.0", - "specVersion": "0.8", - "name": "Solve Math Problems Workflow", - "description": "Solve math problems", - "start": "Solve", - "functions": [ - { - "name": "solveMathExpressionFunction", - "operation": "http://myapis.org/mapthapis.json#solveExpression" - } - ], - "states": [ - { - "name": "Solve", - "type": "foreach", - "inputCollection": "${ .expressions }", - "iterationParam": "singleexpression", - "outputCollection": "${ .results }", - "actions": [ - { - "functionRef": { - "refName": "solveMathExpressionFunction", - "arguments": { - "expression": "${ .singleexpression }" - } - } - } - ], - 
"stateDataFilter": { - "output": "${ .results }" - }, - "end": true - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/urifiles/auth.json b/parser/testdata/workflows/urifiles/auth.json deleted file mode 100644 index ff211df..0000000 --- a/parser/testdata/workflows/urifiles/auth.json +++ /dev/null @@ -1,17 +0,0 @@ -[ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth2", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } -] \ No newline at end of file diff --git a/parser/testdata/workflows/vitalscheck.json b/parser/testdata/workflows/vitalscheck.json deleted file mode 100644 index 3a89b78..0000000 --- a/parser/testdata/workflows/vitalscheck.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": "vitalscheck", - "name": "Car Vitals Check", - "version": "1.0", - "specVersion": "0.8", - "start": "CheckVitals", - "states": [ - { - "name": "CheckVitals", - "type": "operation", - "actions": [ - { - "functionRef": "Check Tire Pressure" - }, - { - "functionRef": "Check Oil Pressure" - }, - { - "functionRef": "Check Coolant Level" - }, - { - "functionRef": "Check Battery" - } - ], - "end": { - "produceEvents": [ - { - "eventRef": "DisplayChecksOnDashboard", - "data": "${ .evaluations }" - } - ] - } - } - ], - "functions": [ - { - "name": "Check Tire Pressure", - "operation": "mycarservices.json#checktirepressure" - }, - { - "name": "Check Oil Pressure", - "operation": "mycarservices.json#checkoilpressure" - }, - { - "name": "Check Coolant Level", - "operation": "mycarservices.json#checkcoolantlevel" - }, - { - "name": "Check Battery", - "operation": "mycarservices.json#checkbattery" - } - ] -} \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json b/parser/testdata/workflows/witherrors/applicationrequest-issue44.json deleted file mode 100644 index c0b72c8..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue44.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "P1S", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file 
diff --git a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json b/parser/testdata/workflows/witherrors/applicationrequest-issue74.json deleted file mode 100644 index e72712d..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest-issue74.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [{ - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }], - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }" - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json b/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json deleted file mode 100644 index d01c35e..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.auth.invalid.format.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] - } \ No newline at end of file diff --git 
a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json b/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json deleted file mode 100644 index 101b9bf..0000000 --- a/parser/testdata/workflows/witherrors/applicationrequest.authdupl.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "id": "applicantrequest", - "version": "1.0", - "name": "Applicant Request Decision Workflow", - "description": "Determine if applicant request is valid", - "start": "CheckApplication", - "specVersion": "0.8", - "auth": [ - { - "name": "testAuth", - "scheme": "bearer", - "properties": { - "token": "test_token" - } - }, - { - "name": "testAuth", - "scheme": "basic", - "properties": { - "username": "test_user", - "password": "test_pwd" - } - } - ] - , - "functions": [ - { - "name": "sendRejectionEmailFunction", - "operation": "http://myapis.org/applicationapi.json#emailRejection" - } - ], - "retries": [ - { - "name": "TimeoutRetryStrategy", - "delay": "PT1M", - "maxAttempts": "5" - } - ], - "states": [ - { - "name": "CheckApplication", - "type": "switch", - "dataConditions": [ - { - "condition": "${ .applicants | .age >= 18 }", - "transition": { - "nextState": "StartApplication" - } - }, - { - "condition": "${ .applicants | .age < 18 }", - "transition": { - "nextState": "RejectApplication" - } - } - ], - "default": { - "transition": { - "nextState": "RejectApplication" - } - } - }, - { - "name": "StartApplication", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "startApplicationWorkflowId" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "RejectApplication", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "sendRejectionEmailFunction", - "parameters": { - "applicant": "${ .applicant }" - } - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/util/floatstr/floatstr.go b/util/floatstr/floatstr.go deleted file mode 100644 index 3261fdd..0000000 --- a/util/floatstr/floatstr.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package floatstr - -import ( - "encoding/json" - "fmt" - "strconv" -) - -// Float32OrString is a type that can hold a float32 or a string. -// implementation borrowed from apimachinary intstr package: https://github.com/kubernetes/apimachinery/blob/master/pkg/util/intstr/intstr.go -type Float32OrString struct { - Type Type `json:"type,omitempty"` - FloatVal float32 `json:"floatVal,omitempty"` - StrVal string `json:"strVal,omitempty"` -} - -// Type represents the stored type of Float32OrString. -type Type int64 - -const ( - // Float ... - Float Type = iota // The Float32OrString holds a float. - // String ... - String // The Float32OrString holds a string. -) - -// FromFloat creates an Float32OrString object with a float32 value. It is -// your responsibility not to call this method with a value greater -// than float32. 
-func FromFloat(val float32) Float32OrString { - return Float32OrString{Type: Float, FloatVal: val} -} - -// FromString creates a Float32OrString object with a string value. -func FromString(val string) Float32OrString { - return Float32OrString{Type: String, StrVal: val} -} - -// Parse the given string and try to convert it to a float32 before -// setting it as a string value. -func Parse(val string) Float32OrString { - f, err := strconv.ParseFloat(val, 32) - if err != nil { - return FromString(val) - } - return FromFloat(float32(f)) -} - -// UnmarshalJSON implements the json.Unmarshaller interface. -func (floatstr *Float32OrString) UnmarshalJSON(value []byte) error { - if value[0] == '"' { - floatstr.Type = String - return json.Unmarshal(value, &floatstr.StrVal) - } - floatstr.Type = Float - return json.Unmarshal(value, &floatstr.FloatVal) -} - -// MarshalJSON implements the json.Marshaller interface. -func (floatstr Float32OrString) MarshalJSON() ([]byte, error) { - switch floatstr.Type { - case Float: - return json.Marshal(floatstr.FloatVal) - case String: - return json.Marshal(floatstr.StrVal) - default: - return []byte{}, fmt.Errorf("impossible Float32OrString.Type") - } -} - -// String returns the string value, or the float value. -func (floatstr *Float32OrString) String() string { - if floatstr == nil { - return "" - } - if floatstr.Type == String { - return floatstr.StrVal - } - return strconv.FormatFloat(float64(floatstr.FloatValue()), 'E', -1, 32) -} - -// FloatValue returns the FloatVal if type float32, or if -// it is a String, will attempt a conversion to float32, -// returning 0 if a parsing error occurs. -func (floatstr *Float32OrString) FloatValue() float32 { - if floatstr.Type == String { - f, _ := strconv.ParseFloat(floatstr.StrVal, 32) - return float32(f) - } - return floatstr.FloatVal -} diff --git a/util/floatstr/floatstr_test.go b/util/floatstr/floatstr_test.go deleted file mode 100644 index ee25fbe..0000000 --- a/util/floatstr/floatstr_test.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package floatstr - -import ( - "encoding/json" - "reflect" - "testing" - - "k8s.io/apimachinery/pkg/util/yaml" -) - -func TestFromFloat(t *testing.T) { - i := FromFloat(93.93) - if i.Type != Float || i.FloatVal != 93.93 { - t.Errorf("Expected FloatVal=93.93, got %+v", i) - } -} - -func TestFromString(t *testing.T) { - i := FromString("76.76") - if i.Type != String || i.StrVal != "76.76" { - t.Errorf("Expected StrVal=\"76.76\", got %+v", i) - } -} - -type FloatOrStringHolder struct { - FOrS Float32OrString `json:"val"` -} - -func TestIntOrStringUnmarshalJSON(t *testing.T) { - cases := []struct { - input string - result Float32OrString - }{ - {"{\"val\": 123.123}", FromFloat(123.123)}, - {"{\"val\": \"123.123\"}", FromString("123.123")}, - } - - for _, c := range cases { - var result FloatOrStringHolder - if err := json.Unmarshal([]byte(c.input), &result); err != nil { - t.Errorf("Failed to unmarshal input '%v': %v", c.input, err) - } - if result.FOrS != c.result { - t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result) - } - } -} - -func TestIntOrStringMarshalJSON(t *testing.T) { - cases := []struct { - input Float32OrString - result string - }{ - {FromFloat(123.123), "{\"val\":123.123}"}, - {FromString("123.123"), "{\"val\":\"123.123\"}"}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - result, err := json.Marshal(&input) - if err != nil { - t.Errorf("Failed to marshal input '%v': %v", input, err) - } - if string(result) != c.result { - t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result)) - } - } -} - -func TestIntOrStringMarshalJSONUnmarshalYAML(t *testing.T) { - cases := []struct { - input Float32OrString - }{ - {FromFloat(123.123)}, - {FromString("123.123")}, - } - - for _, c := range cases { - input := FloatOrStringHolder{c.input} - jsonMarshalled, err := json.Marshal(&input) - if err != nil { - t.Errorf("1: Failed to marshal input: '%v': %v", input, err) - } - - var result FloatOrStringHolder - err = yaml.Unmarshal(jsonMarshalled, &result) - if err != nil { - t.Errorf("2: Failed to unmarshal '%+v': %v", string(jsonMarshalled), err) - } - - if !reflect.DeepEqual(input, result) { - t.Errorf("3: Failed to marshal input '%+v': got %+v", input, result) - } - } -} diff --git a/util/unmarshal.go b/util/unmarshal.go deleted file mode 100644 index d00e9d2..0000000 --- a/util/unmarshal.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2020 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "net/http" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "sync/atomic" - "time" - - "sigs.k8s.io/yaml" - - val "github.com/serverlessworkflow/sdk-go/v2/validator" -) - -// Kind ... 
-// +k8s:deepcopy-gen=false -type Kind interface { - KindValues() []string - String() string -} - -// TODO: Remove global variable -var HttpClient = http.Client{Timeout: time.Duration(1) * time.Second} - -// UnmarshalError ... -// +k8s:deepcopy-gen=false -type UnmarshalError struct { - err error - parameterName string - primitiveType reflect.Kind - objectType reflect.Kind -} - -func (e *UnmarshalError) Error() string { - if e.err == nil { - panic("unmarshalError fail") - } - - var syntaxErr *json.SyntaxError - var unmarshalTypeErr *json.UnmarshalTypeError - if errors.As(e.err, &syntaxErr) { - return fmt.Sprintf("%s has a syntax error %q", e.parameterName, syntaxErr.Error()) - - } else if errors.As(e.err, &unmarshalTypeErr) { - return e.unmarshalMessageError(unmarshalTypeErr) - } - - return e.err.Error() -} - -func (e *UnmarshalError) unmarshalMessageError(err *json.UnmarshalTypeError) string { - if err.Struct == "" && err.Field == "" { - primitiveTypeName := e.primitiveType.String() - - // in some cases the e.primitiveType might be invalid, one of the reasons is because it is nil - // default to string in that case - if e.primitiveType == reflect.Invalid { - primitiveTypeName = "string" - } - - var objectTypeName string - if e.objectType != reflect.Invalid { - switch e.objectType { - case reflect.Struct: - objectTypeName = "object" - case reflect.Map: - objectTypeName = "object" - case reflect.Slice: - objectTypeName = "array" - default: - objectTypeName = e.objectType.String() - } - } - return fmt.Sprintf("%s must be %s or %s", e.parameterName, primitiveTypeName, objectTypeName) - - } else if err.Struct != "" && err.Field != "" { - var primitiveTypeName string - value := reflect.New(err.Type) - if valKinds, ok := value.Elem().Interface().(val.Kind); ok { - values := valKinds.KindValues() - if len(values) <= 2 { - primitiveTypeName = strings.Join(values, " or ") - } else { - primitiveTypeName = fmt.Sprintf("%s, %s", strings.Join(values[:len(values)-2], ", "), strings.Join(values[len(values)-2:], " or ")) - } - } else { - primitiveTypeName = err.Type.Name() - } - - return fmt.Sprintf("%s.%s must be %s", e.parameterName, err.Field, primitiveTypeName) - } - - return err.Error() -} - -func LoadExternalResource(url string) (b []byte, err error) { - index := strings.Index(url, "://") - if index == -1 { - b, err = getBytesFromFile(url) - } else { - scheme := url[:index] - switch scheme { - case "http", "https": - b, err = getBytesFromHttp(url) - case "file": - b, err = getBytesFromFile(url[index+3:]) - default: - return nil, fmt.Errorf("unsupported scheme: %q", scheme) - } - } - if err != nil { - return - } - - // TODO: optimize this - // NOTE: In specification, we can declare independent definitions with another file format, so - // we must convert independently yaml source to json format data before unmarshal. 
- if !json.Valid(b) { - b, err = yaml.YAMLToJSON(b) - if err != nil { - return nil, err - } - return b, nil - } - - return b, nil -} - -func getBytesFromFile(path string) ([]byte, error) { - if WebAssembly() { - return nil, fmt.Errorf("unsupported open file") - } - - // if path is relative, search in include paths - if !filepath.IsAbs(path) { - paths := IncludePaths() - pathFound := false - for i := 0; i < len(paths) && !pathFound; i++ { - sn := filepath.Join(paths[i], path) - _, err := os.Stat(sn) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - return nil, err - } - } else { - path = sn - pathFound = true - } - } - if !pathFound { - return nil, fmt.Errorf("file not found: %q", path) - } - } - - return os.ReadFile(filepath.Clean(path)) -} - -func getBytesFromHttp(url string) ([]byte, error) { - req, err := http.NewRequest(http.MethodGet, url, nil) - if err != nil { - return nil, err - } - - resp, err := HttpClient.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - buf := new(bytes.Buffer) - if _, err = buf.ReadFrom(resp.Body); err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -// +k8s:deepcopy-gen=false -func UnmarshalObjectOrFile[U any](parameterName string, data []byte, valObject *U) error { - var valString string - err := UnmarshalPrimitiveOrObject(parameterName, data, &valString, valObject) - if err != nil || valString == "" { - return err - } - - // Assumes that the value inside `data` is a path to a known location. - // Returns the content of the file or a not nil error reference. - data, err = LoadExternalResource(valString) - if err != nil { - return err - } - - data = bytes.TrimSpace(data) - if data[0] != '{' && data[0] != '[' { - return errors.New("invalid external resource definition") - } - - if data[0] == '[' && parameterName != "auth" && parameterName != "secrets" { - return errors.New("invalid external resource definition") - } - - data = bytes.TrimSpace(data) - if data[0] == '{' && parameterName != "constants" && parameterName != "timeouts" && parameterName != "schema" { - extractData := map[string]json.RawMessage{} - err = json.Unmarshal(data, &extractData) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: reflect.TypeOf(*valObject).Kind(), - } - } - - var ok bool - if data, ok = extractData[parameterName]; !ok { - return fmt.Errorf("external resource parameter not found: %q", parameterName) - } - } - - return UnmarshalObject(parameterName, data, valObject) -} - -func UnmarshalPrimitiveOrObject[T string | bool, U any](parameterName string, data []byte, valPrimitive *T, valStruct *U) error { - data = bytes.TrimSpace(data) - if len(data) == 0 { - // TODO: Normalize error messages - return fmt.Errorf("%s no bytes to unmarshal", parameterName) - } - - isObject := data[0] == '{' || data[0] == '[' - var err error - if isObject { - err = UnmarshalObject(parameterName, data, valStruct) - } else { - err = unmarshalPrimitive(parameterName, data, valPrimitive) - } - - var unmarshalError *UnmarshalError - if errors.As(err, &unmarshalError) { - unmarshalError.objectType = reflect.TypeOf(*valStruct).Kind() - unmarshalError.primitiveType = reflect.TypeOf(*valPrimitive).Kind() - } - - return err -} - -func unmarshalPrimitive[T string | bool](parameterName string, data []byte, value *T) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - primitiveType: 
reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -func UnmarshalObject[U any](parameterName string, data []byte, value *U) error { - if value == nil { - return nil - } - - err := json.Unmarshal(data, value) - if err != nil { - return &UnmarshalError{ - err: err, - parameterName: parameterName, - objectType: reflect.TypeOf(*value).Kind(), - } - } - - return nil -} - -var defaultIncludePaths atomic.Value - -func init() { - // No execute set include path to suport webassembly - if WebAssembly() { - return - } - - wd, err := os.Getwd() - if err != nil { - panic(err) - } - SetIncludePaths([]string{wd}) -} - -// IncludePaths will return the search path for non-absolute import file -func IncludePaths() []string { - return defaultIncludePaths.Load().([]string) -} - -// SetIncludePaths will update the search path for non-absolute import file -func SetIncludePaths(paths []string) { - for _, path := range paths { - if !filepath.IsAbs(path) { - panic(fmt.Errorf("%s must be an absolute file path", path)) - } - } - - defaultIncludePaths.Store(paths) -} - -func WebAssembly() bool { - return runtime.GOOS == "js" && runtime.GOARCH == "wasm" -} diff --git a/util/unmarshal_test.go b/util/unmarshal_test.go deleted file mode 100644 index f7051fb..0000000 --- a/util/unmarshal_test.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package util - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/serverlessworkflow/sdk-go/v2/test" -) - -func TestIncludePaths(t *testing.T) { - assert.NotNil(t, IncludePaths()) - assert.True(t, len(IncludePaths()) > 0) - - // update include paths - initialPaths := IncludePaths() - paths := []string{"/root", "/path"} - SetIncludePaths(paths) - assert.Equal(t, IncludePaths(), paths) - - assert.PanicsWithError(t, "1 must be an absolute file path", assert.PanicTestFunc(func() { - SetIncludePaths([]string{"1"}) - })) - - SetIncludePaths(initialPaths) -} - -func Test_loadExternalResource(t *testing.T) { - SetIncludePaths(append(IncludePaths(), filepath.Join(test.CurrentProjectPath()))) - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte("{}")) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - data, err := LoadExternalResource(server.URL + "/test.json") - assert.NoError(t, err) - assert.Equal(t, "{}", string(data)) - - data, err = LoadExternalResource("parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("file://../parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - data, err = LoadExternalResource("./parser/testdata/eventdefs.yml") - assert.NoError(t, err) - assert.Equal(t, "{\"events\":[{\"correlation\":[{\"contextAttributeName\":\"accountId\"}],\"name\":\"PaymentReceivedEvent\",\"source\":\"paymentEventSource\",\"type\":\"payment.receive\"},{\"kind\":\"produced\",\"name\":\"ConfirmationCompletedEvent\",\"type\":\"payment.confirmation\"}]}", string(data)) - - _, err = LoadExternalResource("ftp://test.yml") - assert.ErrorContains(t, err, "unsupported scheme: \"ftp\"") -} - -func Test_unmarshalObjectOrFile(t *testing.T) { - t.Run("httptest", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - type listStructString []structString - - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - switch req.URL.Path { - case "/test.json": - _, err := rw.Write([]byte(`{"listStructString":[{"fieldValue": "value"}]}`)) - assert.NoError(t, err) - default: - t.Failed() - } - })) - defer server.Close() - HttpClient = *server.Client() - - structValue := &structString{} - data := []byte(`"fieldValue": "value"`) - err := UnmarshalObjectOrFile("structString", data, structValue) - assert.Error(t, err) - assert.Equal(t, &structString{}, structValue) - - listStructValue := &listStructString{} - data = []byte(`[{"fieldValue": "value"}]`) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, 
*listStructValue) - - listStructValue = &listStructString{} - data = []byte(fmt.Sprintf(`"%s/test.json"`, server.URL)) - err = UnmarshalObjectOrFile("listStructString", data, listStructValue) - assert.NoError(t, err) - assert.Equal(t, listStructString{{FieldValue: "value"}}, *listStructValue) - }) -} - -func Test_primitiveOrMapType(t *testing.T) { - type dataMap map[string]json.RawMessage - - t.Run("unmarshal", func(t *testing.T) { - var valBool bool - valMap := &dataMap{} - data := []byte(`"value":true`) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`{value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.Error(t, err) - - valBool = false - valMap = &dataMap{} - data = []byte(`true`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.True(t, valBool) - - valString := "" - valMap = &dataMap{} - data = []byte(`"true"`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valMap) - assert.NoError(t, err) - assert.Equal(t, &dataMap{}, valMap) - assert.Equal(t, `true`, valString) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value":true}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte("true")}) - assert.False(t, valBool) - - valBool = false - valMap = &dataMap{} - data = []byte(`{"value": "true"}`) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valMap) - assert.NoError(t, err) - assert.NotNil(t, valMap) - assert.Equal(t, valMap, &dataMap{"value": []byte(`"true"`)}) - assert.False(t, valBool) - }) - - t.Run("test personalized syntaxError error message", func(t *testing.T) { - type structString struct { - FieldValue string `json:"fieldValue"` - } - - var valString string - valStruct := &structString{} - data := []byte(`{"fieldValue": "value"`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"unexpected end of JSON input\"", err.Error()) - - data = []byte(`{\n "fieldValue": value\n}`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valString, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool has a syntax error \"invalid character '\\\\\\\\' looking for beginning of object key string\"", err.Error()) - // assert.Equal(t, `structBool value '{"fieldValue": value}' is not supported, it has a syntax error "invalid character 'v' looking for beginning of value"`, err.Error()) - }) - - t.Run("test personalized unmarshalTypeError error message", func(t *testing.T) { - type structBool struct { - FieldValue bool `json:"fieldValue"` - } - - var valBool bool - valStruct := &structBool{} - data := []byte(`{ - "fieldValue": "true" -}`) - err := UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool.fieldValue must be bool", err.Error()) - - valBool = false - valStruct = &structBool{} - data = 
[]byte(`"true"`) - err = UnmarshalPrimitiveOrObject("structBool", data, &valBool, valStruct) - assert.Error(t, err) - assert.Equal(t, "structBool must be bool or object", err.Error()) - }) - - t.Run("check json with spaces", func(t *testing.T) { - var valBool bool - valStruct := &dataMap{} - data := []byte(` {"value": "true"} `) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valBool = false - valStruct = &dataMap{} - data = []byte(` true `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - - valString := "" - valStruct = &dataMap{} - data = []byte(` "true" `) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - }) - - t.Run("check tabs", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\t') + `"true"` + string('\t')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\t') + `true` + string('\t')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("check breakline", func(t *testing.T) { - valString := "" - valStruct := &dataMap{} - data := []byte(string('\n') + `"true"` + string('\n')) - err := UnmarshalPrimitiveOrObject("dataMap", data, &valString, valStruct) - assert.NoError(t, err) - - valBool := false - valStruct = &dataMap{} - data = []byte(string('\n') + `true` + string('\n')) - err = UnmarshalPrimitiveOrObject("dataMap", data, &valBool, valStruct) - assert.NoError(t, err) - }) - - t.Run("test recursivity and default value", func(t *testing.T) { - valStruct := &structBool{} - data := []byte(`{"fieldValue": false}`) - err := json.Unmarshal(data, valStruct) - assert.NoError(t, err) - assert.False(t, valStruct.FieldValue) - }) -} - -type structBool struct { - FieldValue bool `json:"fieldValue"` -} - -type structBoolUnmarshal structBool - -func (s *structBool) UnmarshalJSON(data []byte) error { - s.FieldValue = true - return UnmarshalObject("unmarshalJSON", data, (*structBoolUnmarshal)(s)) -} diff --git a/validate/validator.go b/validate/validator.go new file mode 100644 index 0000000..8749bff --- /dev/null +++ b/validate/validator.go @@ -0,0 +1,50 @@ +// Copyright 2024 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package validate + +import ( + "github.com/serverlessworkflow/sdk-go/v4/internal/load" + "github.com/serverlessworkflow/sdk-go/v4/internal/validator" +) + +// FromFile parses the given Serverless Workflow file into the Workflow type. +func FromFile(path string) error { + root, fileBytes, err := load.FromFile(path) + if err != nil { + return err + } + + return validator.Valid(root, fileBytes) +} + +// FromYAMLSource parses the given Serverless Workflow YAML source into the Workflow type. 
+func FromYAMLSource(source []byte) error { + root, jsonBytes, err := load.FromYAMLSource(source) + if err != nil { + return err + } + + return validator.Valid(root, jsonBytes) +} + +// FromJSONSource parses the given Serverless Workflow JSON source into the Workflow type. +func FromJSONSource(source []byte) error { + root, jsonBytes, err := load.FromJSONSource(source) + if err != nil { + return err + } + + return validator.Valid(root, jsonBytes) +} diff --git a/model/event_data_filter_validator_test.go b/validate/validator_test.go similarity index 53% rename from model/event_data_filter_validator_test.go rename to validate/validator_test.go index 1bbbac9..f9663ad 100644 --- a/model/event_data_filter_validator_test.go +++ b/validate/validator_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Serverless Workflow Specification Authors +// Copyright 2024 The Serverless Workflow Specification Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,11 +12,29 @@ // See the License for the specific language governing permissions and // limitations under the License. -package model +package validate -import "testing" +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValidate(t *testing.T) { + source := []byte(` +document: + dsl: 1.0.0-alpha1 + namespace: examples + name: call-http-shorthand-endpoint + version: 1.0.0-alpha1 +do: + - test: + call: http + with: + method: get + endpoint: https://petstore.swagger.io/v2/pet/{petId} +`) + err := FromYAMLSource(source) + assert.NoError(t, err) -func TestEventDataFilterStateStructLevelValidation(t *testing.T) { - testCases := []ValidationCase{} - StructLevelValidationCtx(t, testCases) } diff --git a/validator/validator.go b/validator/validator.go deleted file mode 100644 index f241f84..0000000 --- a/validator/validator.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2021 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "context" - "errors" - "strconv" - - "github.com/relvacode/iso8601" - "github.com/sosodev/duration" - "k8s.io/apimachinery/pkg/util/intstr" - - validator "github.com/go-playground/validator/v10" -) - -// TODO: expose a better validation message. 
See: https://pkg.go.dev/gopkg.in/go-playground/validator.v8#section-documentation - -type Kind interface { - KindValues() []string - String() string -} - -var validate *validator.Validate - -func init() { - validate = validator.New() - - err := validate.RegisterValidationCtx("iso8601duration", validateISO8601TimeDurationFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidationCtx("iso8601datetime", validateISO8601DatetimeFunc) - if err != nil { - panic(err) - } - - err = validate.RegisterValidation("oneofkind", oneOfKind) - if err != nil { - panic(err) - } -} - -// GetValidator gets the default validator.Validate reference -func GetValidator() *validator.Validate { - return validate -} - -// ValidateISO8601TimeDuration validate the string is iso8601 duration format -func ValidateISO8601TimeDuration(s string) error { - if s == "" { - return errors.New("could not parse duration string") - } - _, err := duration.Parse(s) - if err != nil { - return errors.New("could not parse duration string") - } - return err -} - -func validateISO8601TimeDurationFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601TimeDuration(fl.Field().String()) - return err == nil -} - -// ValidateISO8601Datetime validate the string is iso8601 Datetime format -func ValidateISO8601Datetime(s string) error { - _, err := iso8601.ParseString(s) - return err -} - -func validateISO8601DatetimeFunc(_ context.Context, fl validator.FieldLevel) bool { - err := ValidateISO8601Datetime(fl.Field().String()) - return err == nil -} - -func oneOfKind(fl validator.FieldLevel) bool { - if val, ok := fl.Field().Interface().(Kind); ok { - for _, value := range val.KindValues() { - if value == val.String() { - return true - } - } - } - - return false -} - -func ValidateGt0IntStr(value *intstr.IntOrString) bool { - switch value.Type { - case intstr.Int: - if value.IntVal <= 0 { - return false - } - case intstr.String: - v, err := strconv.Atoi(value.StrVal) - if err != nil { - return false - } - - if v <= 0 { - return false - } - } - - return true -} diff --git a/validator/validator_test.go b/validator/validator_test.go deleted file mode 100644 index daab56a..0000000 --- a/validator/validator_test.go +++ /dev/null @@ -1,228 +0,0 @@ -// Copyright 2022 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
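For orientation on the new v4 entry points added in validate/validator.go above, here is a minimal usage sketch. It assumes only the FromYAMLSource and FromFile signatures shown in this patch; the file path in the last call is hypothetical, and the YAML document is the one used by TestValidate.

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/validate"
)

func main() {
	// Same workflow document exercised by TestValidate in validate/validator_test.go.
	source := []byte(`
document:
  dsl: 1.0.0-alpha1
  namespace: examples
  name: call-http-shorthand-endpoint
  version: 1.0.0-alpha1
do:
  - test:
      call: http
      with:
        method: get
        endpoint: https://petstore.swagger.io/v2/pet/{petId}
`)

	// FromYAMLSource converts the YAML source to JSON internally and validates it
	// against the embedded DSL schema; a nil error means the document is valid.
	if err := validate.FromYAMLSource(source); err != nil {
		fmt.Println("invalid workflow:", err)
		return
	}
	fmt.Println("workflow is valid")

	// Validating a file on disk works the same way (the path here is hypothetical).
	_ = validate.FromFile("examples/workflow.yaml")
}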
- -package validator - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/util/intstr" -) - -func TestValidateISO8601TimeDuration(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "normal_all_designator", - s: "P3Y6M4DT12H30M5S", - err: ``, - }, - { - desp: "normal_second_designator", - s: "PT5S", - err: ``, - }, - { - desp: "fractional_second_designator", - s: "PT0.5S", - err: ``, - }, - { - desp: "empty value", - s: "", - err: `could not parse duration string`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601TimeDuration(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -func TestValidateISO8601Timestamp(t *testing.T) { - type testCase struct { - desp string - s string - err string - } - testCases := []testCase{ - { - desp: "workflow_spec_example", - s: "2021-11-05T08:15:30-05:00", - err: ``, - }, - { - desp: "datetime", - s: "2023-09-08T20:15:46+00:00", - err: ``, - }, - { - desp: "date", - s: "2023-09-08", - err: ``, - }, - { - desp: "time", - s: "13:15:33.074-07:00", - err: "iso8601: Unexpected character `:`", - }, - { - desp: "empty value", - s: "", - err: `iso8601: Cannot parse "": month 0 is not in range 1-12`, - }, - } - for _, tc := range testCases { - t.Run(tc.desp, func(t *testing.T) { - err := ValidateISO8601Datetime(tc.s) - - if tc.err != "" { - assert.Error(t, err) - assert.Regexp(t, tc.err, err) - return - } - - assert.NoError(t, err) - }) - } -} - -type testKind string - -func (k testKind) KindValues() []string { - return []string{"test1", "test2"} -} - -func (k testKind) String() string { - return string(k) -} - -type testKindInvalid string - -func (k testKindInvalid) AllValuesInvalid() []string { - return []string{"test1", "test2"} -} - -func (k testKindInvalid) String() string { - return string(k) -} - -func Test_oneOfKind(t *testing.T) { - validate := GetValidator() - - t.Run("kind without kindInvalid", func(t *testing.T) { - spec := struct { - f interface{} - t string - }{ - f: testKindInvalid("test1"), t: "oneofkind", - } - - errs := validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) - - t.Run("kind", func(t *testing.T) { - spec := struct { - f testKind - t string - }{ - f: testKind("test1"), t: "oneofkind", - } - errs := validate.Var(spec.f, spec.t) - assert.NoError(t, errs) - - spec = struct { - f testKind - t string - }{ - f: testKind("test3"), t: "oneofkind", - } - errs = validate.Var(spec.f, spec.t) - assert.Error(t, errs) - - }) -} - -func TestValidateIntStr(t *testing.T) { - - testCase := []struct { - Desp string - Test *intstr.IntOrString - Return bool - }{ - { - Desp: "success int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 1, - }, - Return: true, - }, - { - Desp: "success string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "1", - }, - Return: true, - }, - { - Desp: "fail int", - Test: &intstr.IntOrString{ - Type: intstr.Int, - IntVal: 0, - }, - Return: false, - }, - { - Desp: "fail string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "0", - }, - Return: false, - }, - { - Desp: "fail invalid string", - Test: &intstr.IntOrString{ - Type: intstr.String, - StrVal: "aa", - }, - Return: false, - }, - } - - for _, c := range testCase { - t.Run(c.Desp, func(t *testing.T) { - valid := ValidateGt0IntStr(c.Test) - assert.Equal(t, c.Return, valid) - }) - } 
-} diff --git a/validator/workflow.go b/validator/workflow.go deleted file mode 100644 index d5be7b5..0000000 --- a/validator/workflow.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2023 The Serverless Workflow Specification Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "errors" - "fmt" - "reflect" - "strings" - - validator "github.com/go-playground/validator/v10" -) - -const ( - TagExists string = "exists" - TagRequired string = "required" - TagExclusive string = "exclusive" - - TagRecursiveState string = "recursivestate" - - // States referenced by compensatedBy (as well as any other states that they transition to) must obey following rules: - TagTransitionMainWorkflow string = "transtionmainworkflow" // They should not have any incoming transitions (should not be part of the main workflow control-flow logic) - TagCompensatedbyEventState string = "compensatedbyeventstate" // They cannot be an event state - TagRecursiveCompensation string = "recursivecompensation" // They cannot themselves set their compensatedBy property to true (compensation is not recursive) - TagCompensatedby string = "compensatedby" // They must define the usedForCompensation property and set it to true - TagTransitionUseForCompensation string = "transitionusedforcompensation" // They can transition only to states which also have their usedForCompensation property and set to true -) - -type WorkflowErrors []error - -func (e WorkflowErrors) Error() string { - errors := []string{} - for _, err := range []error(e) { - errors = append(errors, err.Error()) - } - return strings.Join(errors, "\n") -} - -func WorkflowError(err error) error { - if err == nil { - return nil - } - - var invalidErr *validator.InvalidValidationError - if errors.As(err, &invalidErr) { - return err - } - - var validationErrors validator.ValidationErrors - if !errors.As(err, &validationErrors) { - return err - } - - removeNamespace := []string{ - "BaseWorkflow", - "BaseState", - "OperationState", - } - - workflowErrors := []error{} - for _, err := range validationErrors { - // normalize namespace - namespaceList := strings.Split(err.Namespace(), ".") - normalizedNamespaceList := []string{} - for i := range namespaceList { - part := namespaceList[i] - if !contains(removeNamespace, part) { - part := strings.ToLower(part[:1]) + part[1:] - normalizedNamespaceList = append(normalizedNamespaceList, part) - } - } - namespace := strings.Join(normalizedNamespaceList, ".") - - switch err.Tag() { - case "unique": - if err.Param() == "" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate value", namespace)) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s has duplicate %q", namespace, strings.ToLower(err.Param()))) - } - case "min": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must have the minimum %s", namespace, err.Param())) - case "required_without": - if namespace == "workflow.iD" { - workflowErrors = append(workflowErrors, errors.New("workflow.id 
required when \"workflow.key\" is not defined")) - } else if namespace == "workflow.key" { - workflowErrors = append(workflowErrors, errors.New("workflow.key required when \"workflow.id\" is not defined")) - } else if err.StructField() == "FunctionRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s required when \"eventRef\" or \"subFlowRef\" is not defined", namespace)) - } else { - workflowErrors = append(workflowErrors, err) - } - case "oneofkind": - value := reflect.New(err.Type()).Elem().Interface().(Kind) - workflowErrors = append(workflowErrors, fmt.Errorf("%s need by one of %s", namespace, value.KindValues())) - case "gt0": - workflowErrors = append(workflowErrors, fmt.Errorf("%s must be greater than 0", namespace)) - case TagExists: - workflowErrors = append(workflowErrors, fmt.Errorf("%s don't exist %q", namespace, err.Value())) - case TagRequired: - workflowErrors = append(workflowErrors, fmt.Errorf("%s is required", namespace)) - case TagExclusive: - if err.StructField() == "ErrorRef" { - workflowErrors = append(workflowErrors, fmt.Errorf("%s or %s are exclusive", namespace, replaceLastNamespace(namespace, "errorRefs"))) - } else { - workflowErrors = append(workflowErrors, fmt.Errorf("%s exclusive", namespace)) - } - case TagCompensatedby: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is not defined as usedForCompensation", namespace, err.Value())) - case TagCompensatedbyEventState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation and cannot be an event state", namespace, err.Value())) - case TagRecursiveCompensation: - workflowErrors = append(workflowErrors, fmt.Errorf("%s = %q is defined as usedForCompensation (cannot themselves set their compensatedBy)", namespace, err.Value())) - case TagRecursiveState: - workflowErrors = append(workflowErrors, fmt.Errorf("%s can't no be recursive %q", namespace, strings.ToLower(err.Param()))) - case TagISO8601Duration: - workflowErrors = append(workflowErrors, fmt.Errorf("%s invalid iso8601 duration %q", namespace, err.Value())) - default: - workflowErrors = append(workflowErrors, err) - } - } - - return WorkflowErrors(workflowErrors) -} - -func contains(a []string, x string) bool { - for _, n := range a { - if x == n { - return true - } - } - return false -} - -func replaceLastNamespace(namespace, replace string) string { - index := strings.LastIndex(namespace, ".") - if index == -1 { - return namespace - } - - return fmt.Sprintf("%s.%s", namespace[:index], replace) -} From 5a06f39d9eeb5db6ef432b23da4c1cc2e6f8efc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Sat, 24 Aug 2024 16:57:15 -0300 Subject: [PATCH 2/6] update dsl MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrĂ© R. 
de Miranda --- internal/dsl/dsl.go | 231 +++++++++++++++++--------------------------- 1 file changed, 88 insertions(+), 143 deletions(-) diff --git a/internal/dsl/dsl.go b/internal/dsl/dsl.go index 0f579dd..443fae8 100644 --- a/internal/dsl/dsl.go +++ b/internal/dsl/dsl.go @@ -71,7 +71,6 @@ properties: type: array items: type: object - title: ExtensionItem minProperties: 1 maxProperties: 1 additionalProperties: @@ -123,7 +122,6 @@ $defs: type: array items: type: object - title: TaskItem minProperties: 1 maxProperties: 1 additionalProperties: @@ -176,7 +174,6 @@ $defs: type: string const: asyncapi with: - title: WithAsyncAPI type: object properties: document: @@ -198,8 +195,10 @@ $defs: type: object description: The payload to call the AsyncAPI operation with, if any. authentication: - $ref: '#/$defs/referenceableAuthenticationPolicy' description: The authentication policy, if any, to use when calling the AsyncAPI operation. + oneOf: + - $ref: '#/$defs/authenticationPolicy' + - type: string required: [ document, operationRef ] additionalProperties: false description: Defines the AsyncAPI call to perform. @@ -213,7 +212,6 @@ $defs: type: string const: grpc with: - title: WithGRPC type: object properties: proto: @@ -235,8 +233,10 @@ $defs: max: 65535 description: The port number of the GRPC service to call. authentication: - $ref: '#/$defs/referenceableAuthenticationPolicy' description: The endpoint's authentication policy, if any. + oneOf: + - $ref: '#/$defs/authenticationPolicy' + - type: string required: [ name, host ] method: type: string @@ -258,7 +258,6 @@ $defs: type: string const: http with: - title: WithHTTP type: object properties: method: @@ -292,7 +291,6 @@ $defs: type: string const: openapi with: - title: WithOpenAPI type: object properties: document: @@ -306,8 +304,10 @@ $defs: additionalProperties: true description: A name/value mapping of the parameters of the OpenAPI operation to call. authentication: - $ref: '#/$defs/referenceableAuthenticationPolicy' description: The authentication policy, if any, to use when calling the OpenAPI operation. + oneOf: + - $ref: '#/$defs/authenticationPolicy' + - type: string output: type: string enum: [ raw, content, response ] @@ -458,8 +458,7 @@ $defs: run: type: object oneOf: - - title: RunContainer - properties: + - properties: container: type: object properties: @@ -476,14 +475,12 @@ $defs: type: object description: The container's volume mappings, if any. environment: - title: ContainerEnvironment type: object description: A key/value mapping of the environment variables, if any, to use when running the configured process. required: [ image ] required: [ container ] description: Enables the execution of external processes encapsulated within a containerized environment. - - title: RunScript - properties: + - properties: script: type: object properties: @@ -491,19 +488,16 @@ $defs: type: string description: The language of the script to run. environment: - title: ScriptEnvironment type: object additionalProperties: true description: A key/value mapping of the environment variables, if any, to use when running the configured process. oneOf: - - title: ScriptInline - properties: + - properties: code: type: string required: [ code ] description: The script's code. - - title: ScriptExternal - properties: + - properties: source: $ref: '#/$defs/externalResource' description: The script's resource. 
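The hunks above inline the authentication reference: wherever a call or endpoint previously pointed at referenceableAuthenticationPolicy, the updated schema now accepts either an inline authenticationPolicy object or a plain string. A short sketch of what that permits in a workflow document, reusing the validate package from the first patch; the endpoint and credentials are illustrative only, and whether this exact document passes validation depends on the rest of the schema.

package main

import (
	"fmt"

	"github.com/serverlessworkflow/sdk-go/v4/validate"
)

func main() {
	// An http call whose endpoint carries an inline basic authentication policy,
	// matching the inlined oneOf (authenticationPolicy or string) in the updated schema.
	inlineAuth := []byte(`
document:
  dsl: 1.0.0-alpha1
  namespace: examples
  name: call-with-inline-auth
  version: 1.0.0-alpha1
do:
  - getPet:
      call: http
      with:
        method: get
        endpoint:
          uri: https://petstore.swagger.io/v2/pet/{petId}
          authentication:
            basic:
              username: admin
              password: example
`)
	fmt.Println(validate.FromYAMLSource(inlineAuth))
}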
@@ -511,8 +505,7 @@ $defs: required: [ language ] required: [ script ] description: Enables the execution of custom scripts or code within a workflow, empowering workflows to perform specialized logic, data processing, or integration tasks by executing user-defined scripts written in various programming languages. - - title: RunShell - properties: + - properties: shell: type: object properties: @@ -520,22 +513,18 @@ $defs: type: string description: The shell command to run. arguments: - title: ShellArguments type: object additionalProperties: true description: A list of the arguments of the shell command to run. environment: - title: ShellEnvironment type: object additionalProperties: true description: A key/value mapping of the environment variables, if any, to use when running the configured process. required: [ command ] required: [ shell ] description: Enables the execution of shell commands within a workflow, enabling workflows to interact with the underlying operating system and perform system-level operations, such as file manipulation, environment configuration, or system administration tasks. - - title: RunWokflow - properties: + - properties: workflow: - title: RunWorkflowDescriptor type: object properties: namespace: @@ -549,7 +538,6 @@ $defs: default: latest description: The version of the workflow to run. Defaults to latest input: - title: WorkflowInput type: object additionalProperties: true description: The data, if any, to pass as input to the workflow to execute. The value should be validated against the target workflow's input schema, if specified. @@ -582,10 +570,8 @@ $defs: type: object minProperties: 1 maxProperties: 1 - title: SwitchItem additionalProperties: type: object - title: SwitchCase properties: name: type: string @@ -610,7 +596,6 @@ $defs: type: object properties: errors: - title: CatchErrors type: object as: type: string @@ -644,104 +629,76 @@ $defs: enum: [ continue, exit, end ] default: continue - type: string - referenceableAuthenticationPolicy: - type: object - oneOf: - - title: AuthenticationPolicyReference - properties: - use: - type: string - minLength: 1 - description: The name of the authentication policy to use - required: [use] - - $ref: '#/$defs/authenticationPolicy' - secretBasedAuthenticationPolicy: - type: object - properties: - use: - type: string - minLength: 1 - description: The name of the authentication policy to use - required: [use] authenticationPolicy: type: object oneOf: - - title: BasicAuthenticationPolicy - properties: + - properties: basic: type: object - oneOf: - - properties: - username: - type: string - description: The username to use. - password: - type: string - description: The password to use. - required: [ username, password ] - - $ref: '#/$defs/secretBasedAuthenticationPolicy' + properties: + username: + type: string + description: The username to use. + password: + type: string + description: The password to use. + required: [ username, password ] required: [ basic ] description: Use basic authentication. - - title: BearerAuthenticationPolicy - properties: + - properties: bearer: type: object - oneOf: - - properties: - token: - type: string - description: The bearer token to use. - required: [ token ] - - $ref: '#/$defs/secretBasedAuthenticationPolicy' + properties: + token: + type: string + description: The bearer token to use. + required: [ token ] required: [ bearer ] description: Use bearer authentication. 
- - title: OAuth2AuthenticationPolicy - properties: + - properties: oauth2: type: object - oneOf: - - properties: - authority: - type: string - format: uri - description: The URI that references the OAuth2 authority to use. - grant: - type: string - description: The grant type to use. - client: - type: object - properties: - id: - type: string - description: The client id to use. - secret: - type: string - description: The client secret to use, if any. - required: [ id ] - scopes: - type: array - items: - type: string - description: The scopes, if any, to request the token for. - audiences: - type: array - items: - type: string - description: The audiences, if any, to request the token for. - username: + properties: + authority: + type: string + format: uri + description: The URI that references the OAuth2 authority to use. + grant: + type: string + description: The grant type to use. + client: + type: object + properties: + id: type: string - description: The username to use. Used only if the grant type is Password. - password: + description: The client id to use. + secret: type: string - description: The password to use. Used only if the grant type is Password. - subject: - $ref: '#/$defs/oauth2Token' - description: The security token that represents the identity of the party on behalf of whom the request is being made. - actor: - $ref: '#/$defs/oauth2Token' - description: The security token that represents the identity of the acting party. - required: [ authority, grant, client ] - - $ref: '#/$defs/secretBasedAuthenticationPolicy' + description: The client secret to use, if any. + required: [ id ] + scopes: + type: array + items: + type: string + description: The scopes, if any, to request the token for. + audiences: + type: array + items: + type: string + description: The audiences, if any, to request the token for. + username: + type: string + description: The username to use. Used only if the grant type is Password. + password: + type: string + description: The password to use. Used only if the grant type is Password. + subject: + $ref: '#/$defs/oauth2Token' + description: The security token that represents the identity of the party on behalf of whom the request is being made. + actor: + $ref: '#/$defs/oauth2Token' + description: The security token that represents the identity of the acting party. + required: [ authority, grant, client ] required: [ oauth2 ] description: Use OAUTH2 authentication. description: Defines an authentication policy. @@ -804,30 +761,29 @@ $defs: format: uri-template description: The endpoint's URI. authentication: - $ref: '#/$defs/referenceableAuthenticationPolicy' description: The authentication policy to use. + oneOf: + - $ref: '#/$defs/authenticationPolicy' + - type: string required: [ uri ] eventConsumptionStrategy: type: object oneOf: - - title: AllEventConsumptionStrategy - properties: + - properties: all: type: array items: $ref: '#/$defs/eventFilter' description: A list containing all the events that must be consumed. required: [ all ] - - title: AnyEventConsumptionStrategy - properties: + - properties: any: type: array items: $ref: '#/$defs/eventFilter' description: A list containing any of the events to consume. required: [ any ] - - title: OneEventConsumptionStrategy - properties: + - properties: one: $ref: '#/$defs/eventFilter' description: The single event to consume. 
@@ -836,7 +792,6 @@ $defs: type: object properties: with: - title: WithEvent type: object minProperties: 1 properties: @@ -897,16 +852,17 @@ $defs: oneOf: - type: string format: uri - - title: ExternalResourceURI - type: object + - type: object properties: uri: type: string format: uri description: The endpoint's URI. authentication: - $ref: '#/$defs/referenceableAuthenticationPolicy' description: The authentication policy to use. + oneOf: + - $ref: '#/$defs/authenticationPolicy' + - type: string name: type: string description: The external resource's name, if any. @@ -918,9 +874,7 @@ $defs: $ref: '#/$defs/schema' description: The schema used to describe and validate the input of the workflow or task. from: - oneOf: - - type: string - - type: object + type: string description: A runtime expression, if any, used to mutate and/or filter the input of the workflow or task. description: Configures the input of a workflow or task. output: @@ -930,9 +884,7 @@ $defs: $ref: '#/$defs/schema' description: The schema used to describe and validate the output of the workflow or task. as: - oneOf: - - type: string - - type: object + type: string description: A runtime expression, if any, used to mutate and/or filter the output of the workflow or task. description: Configures the output of a workflow or task. export: @@ -942,9 +894,7 @@ $defs: $ref: '#/$defs/schema' description: The schema used to describe and validate the workflow context. as: - oneOf: - - type: string - - type: object + type: string description: A runtime expression, if any, used to export the output data to the context. description: Set the content of the context. retryPolicy: @@ -962,20 +912,17 @@ $defs: backoff: type: object oneOf: - - title: ConstantBackoff - properties: + - properties: constant: type: object description: The definition of the constant backoff to use, if any. required: [ constant ] - - title: ExponentialBackOff - properties: + - properties: exponential: type: object description: The definition of the exponential backoff to use, if any. required: [ exponential ] - - title: LinearBackoff - properties: + - properties: linear: type: object description: The definition of the linear backoff to use, if any. @@ -1017,13 +964,11 @@ $defs: default: json description: The schema's format. Defaults to 'json'. The (optional) version of the format can be set using ` + "{format}:{version}" + `. oneOf: - - title: SchemaInline - properties: + - properties: document: description: The schema's inline definition. required: [ document ] - - title: SchemaExternal - properties: + - properties: resource: $ref: '#/$defs/externalResource' description: The schema's external resource. From 02b4ff12428506d2aeb5745125275fc9ec1e9a74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Sat, 24 Aug 2024 17:04:57 -0300 Subject: [PATCH 3/6] remove go.mod toolcahin MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrĂ© R. 
de Miranda --- go.mod | 2 -- 1 file changed, 2 deletions(-) diff --git a/go.mod b/go.mod index 3e30314..dac12e9 100644 --- a/go.mod +++ b/go.mod @@ -2,8 +2,6 @@ module github.com/serverlessworkflow/sdk-go/v4 go 1.21 -toolchain go1.23.0 - require ( github.com/pkg/errors v0.9.1 github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 From b864bb40b8d0f1b6dd6ca809eacf6ad18730d0b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Wed, 18 Sep 2024 17:13:18 -0300 Subject: [PATCH 4/6] add version in constant MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrĂ© R. de Miranda --- builder/document.go | 7 +++++-- internal/dsl/dsl.go | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/builder/document.go b/builder/document.go index abb76eb..ae2c036 100644 --- a/builder/document.go +++ b/builder/document.go @@ -14,7 +14,10 @@ package builder -import "github.com/serverlessworkflow/sdk-go/v4/graph" +import ( + "github.com/serverlessworkflow/sdk-go/v4/graph" + "github.com/serverlessworkflow/sdk-go/v4/internal/dsl" +) type DocumentBuilder struct { root *graph.Node @@ -68,6 +71,6 @@ func NewDocumentBuilder(root *graph.Node) *DocumentBuilder { documentBuilder := &DocumentBuilder{ root: root, } - documentBuilder.SetDSL("1.0.0-alpha1") + documentBuilder.SetDSL(dsl.DSLVersion) return documentBuilder } diff --git a/internal/dsl/dsl.go b/internal/dsl/dsl.go index 443fae8..496e8b3 100644 --- a/internal/dsl/dsl.go +++ b/internal/dsl/dsl.go @@ -14,6 +14,7 @@ package dsl +const DSLVersion = "1.0.0-alpha1" const DSLSpec = ` $id: https://serverlessworkflow.io/schemas/1.0.0-alpha1/workflow.yaml $schema: https://json-schema.org/draft/2020-12/schema From 047521c30862aebeb30bb08b9ebb60c87abffc4c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Wed, 18 Sep 2024 17:17:01 -0300 Subject: [PATCH 5/6] change log fatal to panic MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrĂ© R. de Miranda --- graph/graph.go | 4 ++-- graph/unmarshal.go | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/graph/graph.go b/graph/graph.go index fe6e3d0..e6a04aa 100644 --- a/graph/graph.go +++ b/graph/graph.go @@ -67,7 +67,7 @@ func (n *Node) MarshalJSON() ([]byte, error) { func (n *Node) Edge(name string) *Node { if n.HasValue() { - log.Fatal("value alredy defined, execute clear first") + log.Panic("value already defined, execute clear first") } if _, ok := n.edges[name]; !ok { newNode := NewNode() @@ -100,7 +100,7 @@ func (n *Node) SetBool(value bool) *Node { func (n *Node) setValue(value any) { if len(n.edges) > 0 { - log.Fatal("alredy defined edges, execute clear fist") + log.Panic("already defined edges, execute clear fist") } n.value = value } diff --git a/graph/unmarshal.go b/graph/unmarshal.go index d9fc23c..2fbfc12 100644 --- a/graph/unmarshal.go +++ b/graph/unmarshal.go @@ -103,7 +103,6 @@ func loadExternalResource(url string) (b []byte, err error) { return } - // TODO: optimize this // NOTE: In specification, we can declare independent definitions with another file format, so // we must convert independently yaml source to json format data before unmarshal. 
if !json.Valid(b) { From a791fe1034979330d2753023a4fb34b5175f6a29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20R=2E=20de=20Miranda?= Date: Sun, 29 Sep 2024 09:26:18 -0300 Subject: [PATCH 6/6] generate structs based on jsonspec for kubernetes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: AndrĂ© R. de Miranda --- go.mod | 14 +- go.sum | 38 +- hack/go-jsonschema.sh | 42 + kubernetes/k8s_workflow_integration.go | 4 +- kubernetes/spec/doc.go | 4 + kubernetes/spec/schema.json | 2246 ++++++++++++++++++++++++ kubernetes/spec/spec.go | 893 ++++++++++ 7 files changed, 3237 insertions(+), 4 deletions(-) create mode 100755 hack/go-jsonschema.sh create mode 100644 kubernetes/spec/doc.go create mode 100644 kubernetes/spec/schema.json create mode 100644 kubernetes/spec/spec.go diff --git a/go.mod b/go.mod index dac12e9..c0b669c 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,11 @@ module github.com/serverlessworkflow/sdk-go/v4 -go 1.21 +go 1.22 + +toolchain go1.23.1 require ( + github.com/atombender/go-jsonschema v0.16.0 github.com/pkg/errors v0.9.1 github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 github.com/stretchr/testify v1.8.0 @@ -15,17 +18,26 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fatih/color v1.16.0 // indirect github.com/go-logr/logr v1.2.3 // indirect + github.com/goccy/go-yaml v1.11.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/sanity-io/litter v1.5.5 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect golang.org/x/net v0.18.0 // indirect + golang.org/x/sys v0.19.0 // indirect golang.org/x/text v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect k8s.io/klog/v2 v2.80.2-0.20221028030830-9ae4992afb54 // indirect diff --git a/go.sum b/go.sum index a0e2b00..dc7737d 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,24 @@ +github.com/atombender/go-jsonschema v0.16.0 h1:1C6jMVzAQ4RZCBwGQYMEVZvjSBdKUw/7arkhHPS0ldg= +github.com/atombender/go-jsonschema v0.16.0/go.mod h1:qvHiMeC+Obu1QJTtD+rZGogD+Nn4QCztDJ0UNF8dBfs= +github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/go-logr/logr v1.2.0/go.mod 
h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/goccy/go-yaml v1.11.3 h1:B3W9IdWbvrUu2OYQGwvU1nZtvMQJPBKgBUuweJjLj6I= +github.com/goccy/go-yaml v1.11.3/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= @@ -19,6 +32,15 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -32,14 +54,18 @@ github.com/onsi/gomega v1.24.1 h1:KORJXNNTzJXzu4ScJWssJfJMnJ+2QJqhoQSRwNlze9E= github.com/onsi/gomega v1.24.1/go.mod h1:3AOiACssS3/MajrniINInwbfOOtfZvplPzuRSmvt1jM= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= +github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= 
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw= github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= @@ -57,6 +83,10 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= +golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -79,8 +109,10 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= @@ -93,6 +125,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/hack/go-jsonschema.sh b/hack/go-jsonschema.sh new file mode 100755 index 0000000..56f8b39 --- /dev/null +++ b/hack/go-jsonschema.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2022 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# retrieved from https://github.com/kubernetes/code-generator/blob/master/generate-internal-groups.sh +# and adapted to only install and run the deepcopy-gen + +set -o errexit +set -o nounset +set -o pipefail + +SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. +echo "Script root is $SCRIPT_ROOT" + + +# To support running this script from anywhere, first cd into this directory, +# and then install with forced module mode on and fully qualified name. +# make sure your GOPATH env is properly set. +# it will go under $GOPATH/bin +cd "$(dirname "${0}")" +GO_JSONSCHEMA_VERSION="v0.16.0" +GO111MODULE=on go install github.com/atombender/go-jsonschema@${GO_JSONSCHEMA_VERSION} + +echo "Generating go structs" +mkdir -p ../kubernetes/spec +export GO111MODULE=on +"${GOPATH}/bin/go-jsonschema" \ + -p spec \ + --tags json \ + --struct-name-from-title \ + ../kubernetes/spec/schema.json > ../kubernetes/spec/spec.go diff --git a/kubernetes/k8s_workflow_integration.go b/kubernetes/k8s_workflow_integration.go index 3eed4fa..6d6328b 100644 --- a/kubernetes/k8s_workflow_integration.go +++ b/kubernetes/k8s_workflow_integration.go @@ -17,6 +17,8 @@ package kubernetes import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" + + "github.com/serverlessworkflow/sdk-go/v4/kubernetes/spec" ) // This package provides a very simple api for kubernetes operator to test the integration @@ -36,7 +38,7 @@ import ( // ServerlessWorkflowSpec defines a base API for integration test with operator-sdk type ServerlessWorkflowSpec struct { - // model.Workflow `json:",inline"` + spec.SchemaJson `json:",inline"` } // ServerlessWorkflow ... 
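Reviewer note (not part of the patch): with the generated spec.SchemaJson embedded inline in ServerlessWorkflowSpec above, an operator-side consumer should be able to decode a DSL 1.0 workflow document directly into the Kubernetes integration type. The sketch below is purely illustrative; the sample workflow values are made up, and it assumes the generated type round-trips this shape through plain encoding/json.

    package main

    import (
    	"encoding/json"
    	"fmt"

    	"github.com/serverlessworkflow/sdk-go/v4/kubernetes"
    )

    func main() {
    	// Minimal workflow: "document" and "do" are the two required top-level fields
    	// in the schema added by this patch; the concrete values here are invented.
    	data := []byte(`{
    	  "document": {"dsl": "1.0.0", "namespace": "default", "name": "greet", "version": "0.0.1"},
    	  "do": [{"greet": {"set": {"message": "hello"}}}]
    	}`)

    	var wf kubernetes.ServerlessWorkflowSpec
    	if err := json.Unmarshal(data, &wf); err != nil {
    		fmt.Println("decode failed:", err)
    		return
    	}
    	fmt.Println("workflow decoded")
    }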
diff --git a/kubernetes/spec/doc.go b/kubernetes/spec/doc.go new file mode 100644 index 0000000..fd4fc0f --- /dev/null +++ b/kubernetes/spec/doc.go @@ -0,0 +1,4 @@ +package spec + +// +k8s:deepcopy-gen=package +// +k8s:deepcopy-gen:nonpointer-interfaces=true diff --git a/kubernetes/spec/schema.json b/kubernetes/spec/schema.json new file mode 100644 index 0000000..d49745c --- /dev/null +++ b/kubernetes/spec/schema.json @@ -0,0 +1,2246 @@ +{ + "$id": "https://serverlessworkflow.io/schemas/1.0.0/workflow.yaml", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "Serverless Workflow DSL - Workflow Schema.", + "type": "object", + "required": [ + "document", + "do" + ], + "properties": { + "document": { + "type": "object", + "title": "Document", + "description": "Documents the workflow.", + "unevaluatedProperties": false, + "properties": { + "dsl": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "title": "WorkflowDSL", + "description": "The version of the DSL used by the workflow." + }, + "namespace": { + "type": "string", + "pattern": "^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$", + "title": "WorkflowNamespace", + "description": "The workflow's namespace." + }, + "name": { + "type": "string", + "pattern": "^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$", + "title": "WorkflowName", + "description": "The workflow's name." + }, + "version": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "title": "WorkflowVersion", + "description": "The workflow's semantic version." + }, + "title": { + "type": "string", + "title": "WorkflowTitle", + "description": "The workflow's title." + }, + "summary": { + "type": "string", + "title": "WorkflowSummary", + "description": "The workflow's Markdown summary." + }, + "tags": { + "type": "object", + "title": "WorkflowTags", + "description": "A key/value mapping of the workflow's tags, if any.", + "additionalProperties": true + }, + "metadata": { + "type": "object", + "title": "WorkflowMetadata", + "description": "Holds additional information about the workflow.", + "additionalProperties": true + } + }, + "required": [ + "dsl", + "namespace", + "name", + "version" + ] + }, + "input": { + "$ref": "#/$defs/input", + "title": "Input", + "description": "Configures the workflow's input." 
+ }, + "use": { + "type": "object", + "title": "Use", + "description": "Defines the workflow's reusable components.", + "unevaluatedProperties": false, + "properties": { + "authentications": { + "type": "object", + "title": "UseAuthentications", + "description": "The workflow's reusable authentication policies.", + "additionalProperties": { + "$ref": "#/$defs/authenticationPolicy" + } + }, + "errors": { + "type": "object", + "title": "UseErrors", + "description": "The workflow's reusable errors.", + "additionalProperties": { + "$ref": "#/$defs/error" + } + }, + "extensions": { + "type": "array", + "title": "UseExtensions", + "description": "The workflow's extensions.", + "items": { + "type": "object", + "title": "ExtensionItem", + "minProperties": 1, + "maxProperties": 1, + "additionalProperties": { + "$ref": "#/$defs/extension" + } + } + }, + "functions": { + "type": "object", + "title": "UseFunctions", + "description": "The workflow's reusable functions.", + "additionalProperties": { + "$ref": "#/$defs/task" + } + }, + "retries": { + "type": "object", + "title": "UseRetries", + "description": "The workflow's reusable retry policies.", + "additionalProperties": { + "$ref": "#/$defs/retryPolicy" + } + }, + "secrets": { + "type": "array", + "title": "UseSecrets", + "description": "The workflow's reusable secrets.", + "items": { + "type": "string", + "description": "The workflow's secrets." + } + }, + "timeouts": { + "type": "object", + "title": "UseTimeouts", + "description": "The workflow's reusable timeouts.", + "additionalProperties": { + "$ref": "#/$defs/timeout" + } + } + } + }, + "do": { + "$ref": "#/$defs/taskList", + "title": "Do", + "description": "Defines the task(s) the workflow must perform." + }, + "timeout": { + "oneOf": [ + { + "$ref": "#/$defs/timeout", + "title": "TimeoutDefinition", + "description": "The workflow's timeout configuration, if any." + }, + { + "type": "string", + "title": "TimeoutReference", + "description": "The name of the workflow's timeout, if any." + } + ] + }, + "output": { + "$ref": "#/$defs/output", + "title": "Output", + "description": "Configures the workflow's output." + }, + "schedule": { + "type": "object", + "title": "Schedule", + "description": "Schedules the workflow.", + "unevaluatedProperties": false, + "properties": { + "every": { + "$ref": "#/$defs/duration", + "title": "ScheduleEvery", + "description": "Specifies the duration of the interval at which the workflow should be executed." + }, + "cron": { + "type": "string", + "title": "ScheduleCron", + "description": "Specifies the schedule using a cron expression, e.g., '0 0 * * *' for daily at midnight." + }, + "after": { + "$ref": "#/$defs/duration", + "title": "ScheduleAfter", + "description": "Specifies a delay duration that the workflow must wait before starting again after it completes." + }, + "on": { + "$ref": "#/$defs/eventConsumptionStrategy", + "title": "ScheduleOn", + "description": "Specifies the events that trigger the workflow execution." 
+ } + } + } + }, + "$defs": { + "taskList": { + "title": "TaskList", + "description": "List of named tasks to perform.", + "type": "array", + "items": { + "type": "object", + "title": "TaskItem", + "minProperties": 1, + "maxProperties": 1, + "additionalProperties": { + "$ref": "#/$defs/task" + } + } + }, + "taskBase": { + "type": "object", + "title": "TaskBase", + "description": "An object inherited by all tasks.", + "properties": { + "if": { + "type": "string", + "title": "TaskBaseIf", + "description": "A runtime expression, if any, used to determine whether or not the task should be run." + }, + "input": { + "$ref": "#/$defs/input", + "title": "TaskBaseInput", + "description": "Configure the task's input." + }, + "output": { + "$ref": "#/$defs/output", + "title": "TaskBaseOutput", + "description": "Configure the task's output." + }, + "export": { + "$ref": "#/$defs/export", + "title": "TaskBaseExport", + "description": "Export task output to context." + }, + "timeout": { + "oneOf": [ + { + "$ref": "#/$defs/timeout", + "title": "TaskTimeoutDefinition", + "description": "The task's timeout configuration, if any." + }, + { + "type": "string", + "title": "TaskTimeoutReference", + "description": "The name of the task's timeout, if any." + } + ] + }, + "then": { + "$ref": "#/$defs/flowDirective", + "title": "TaskBaseThen", + "description": "The flow directive to be performed upon completion of the task." + }, + "metadata": { + "type": "object", + "title": "TaskMetadata", + "description": "Holds additional information about the task.", + "additionalProperties": true + } + } + }, + "task": { + "title": "Task", + "description": "A discrete unit of work that contributes to achieving the overall objectives defined by the workflow.", + "unevaluatedProperties": false, + "oneOf": [ + { + "$ref": "#/$defs/callTask" + }, + { + "$ref": "#/$defs/doTask" + }, + { + "$ref": "#/$defs/forkTask" + }, + { + "$ref": "#/$defs/emitTask" + }, + { + "$ref": "#/$defs/forTask" + }, + { + "$ref": "#/$defs/listenTask" + }, + { + "$ref": "#/$defs/raiseTask" + }, + { + "$ref": "#/$defs/runTask" + }, + { + "$ref": "#/$defs/setTask" + }, + { + "$ref": "#/$defs/switchTask" + }, + { + "$ref": "#/$defs/tryTask" + }, + { + "$ref": "#/$defs/waitTask" + } + ] + }, + "callTask": { + "title": "CallTask", + "description": "Defines the call to perform.", + "oneOf": [ + { + "title": "CallAsyncAPI", + "description": "Defines the AsyncAPI call to perform.", + "$ref": "#/$defs/taskBase", + "type": "object", + "required": [ + "call", + "with" + ], + "unevaluatedProperties": false, + "properties": { + "call": { + "type": "string", + "const": "asyncapi" + }, + "with": { + "type": "object", + "title": "AsyncApiArguments", + "description": "The Async API call arguments.", + "properties": { + "document": { + "$ref": "#/$defs/externalResource", + "title": "WithAsyncAPIDocument", + "description": "The document that defines the AsyncAPI operation to call." + }, + "operationRef": { + "type": "string", + "title": "WithAsyncAPIOperation", + "description": "A reference to the AsyncAPI operation to call." + }, + "server": { + "type": "string", + "title": "WithAsyncAPIServer", + "description": "A a reference to the server to call the specified AsyncAPI operation on. If not set, default to the first server matching the operation's channel." + }, + "message": { + "type": "string", + "title": "WithAsyncAPIMessage", + "description": "The name of the message to use. If not set, defaults to the first message defined by the operation." 
+ }, + "binding": { + "type": "string", + "title": "WithAsyncAPIBinding", + "description": "The name of the binding to use. If not set, defaults to the first binding defined by the operation." + }, + "payload": { + "type": "object", + "title": "WithAsyncAPIPayload", + "description": "The payload to call the AsyncAPI operation with, if any." + }, + "authentication": { + "$ref": "#/$defs/referenceableAuthenticationPolicy", + "title": "WithAsyncAPIAuthentication", + "description": "The authentication policy, if any, to use when calling the AsyncAPI operation." + } + }, + "required": [ + "document", + "operationRef" + ], + "unevaluatedProperties": false + } + } + }, + { + "title": "CallGRPC", + "description": "Defines the GRPC call to perform.", + "$ref": "#/$defs/taskBase", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "call", + "with" + ], + "properties": { + "call": { + "type": "string", + "const": "grpc" + }, + "with": { + "type": "object", + "title": "GRPCArguments", + "description": "The GRPC call arguments.", + "properties": { + "proto": { + "$ref": "#/$defs/externalResource", + "title": "WithGRPCProto", + "description": "The proto resource that describes the GRPC service to call." + }, + "service": { + "type": "object", + "title": "WithGRPCService", + "unevaluatedProperties": false, + "properties": { + "name": { + "type": "string", + "title": "WithGRPCServiceName", + "description": "The name of the GRPC service to call." + }, + "host": { + "type": "string", + "title": "WithGRPCServiceHost", + "description": "The hostname of the GRPC service to call.", + "pattern": "^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$" + }, + "port": { + "type": "integer", + "title": "WithGRPCServicePost", + "description": "The port number of the GRPC service to call.", + "minimum": 0, + "maximum": 65535 + }, + "authentication": { + "$ref": "#/$defs/referenceableAuthenticationPolicy", + "title": "WithGRPCServiceAuthentication", + "description": "The endpoint's authentication policy, if any." + } + }, + "required": [ + "name", + "host" + ] + }, + "method": { + "type": "string", + "title": "WithGRPCMethod", + "description": "The name of the method to call on the defined GRPC service." + }, + "arguments": { + "type": "object", + "title": "WithGRPCArguments", + "description": "The arguments, if any, to call the method with.", + "additionalProperties": true + } + }, + "required": [ + "proto", + "service", + "method" + ], + "unevaluatedProperties": false + } + } + }, + { + "title": "CallHTTP", + "description": "Defines the HTTP call to perform.", + "$ref": "#/$defs/taskBase", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "call", + "with" + ], + "properties": { + "call": { + "type": "string", + "const": "http" + }, + "with": { + "type": "object", + "title": "HTTPArguments", + "description": "The HTTP call arguments.", + "properties": { + "method": { + "type": "string", + "title": "WithHTTPMethod", + "description": "The HTTP method of the HTTP request to perform." + }, + "endpoint": { + "title": "WithHTTPEndpoint", + "description": "The HTTP endpoint to send the request to.", + "$ref": "#/$defs/endpoint" + }, + "headers": { + "type": "object", + "title": "WithHTTPHeaders", + "description": "A name/value mapping of the headers, if any, of the HTTP request to perform." + }, + "body": { + "title": "WithHTTPBody", + "description": "The body, if any, of the HTTP request to perform." 
+ }, + "query": { + "type": "object", + "title": "WithHTTPQuery", + "description": "A name/value mapping of the query parameters, if any, of the HTTP request to perform.", + "additionalProperties": true + }, + "output": { + "type": "string", + "title": "WithHTTPOutput", + "description": "The http call output format. Defaults to 'content'.", + "enum": [ + "raw", + "content", + "response" + ] + } + }, + "required": [ + "method", + "endpoint" + ], + "unevaluatedProperties": false + } + } + }, + { + "title": "CallOpenAPI", + "description": "Defines the OpenAPI call to perform.", + "$ref": "#/$defs/taskBase", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "call", + "with" + ], + "properties": { + "call": { + "type": "string", + "const": "openapi" + }, + "with": { + "type": "object", + "title": "OpenAPIArguments", + "description": "The OpenAPI call arguments.", + "properties": { + "document": { + "$ref": "#/$defs/externalResource", + "title": "WithOpenAPIDocument", + "description": "The document that defines the OpenAPI operation to call." + }, + "operationId": { + "type": "string", + "title": "WithOpenAPIOperation", + "description": "The id of the OpenAPI operation to call." + }, + "parameters": { + "type": "object", + "title": "WithOpenAPIParameters", + "description": "A name/value mapping of the parameters of the OpenAPI operation to call.", + "additionalProperties": true + }, + "authentication": { + "$ref": "#/$defs/referenceableAuthenticationPolicy", + "title": "WithOpenAPIAuthentication", + "description": "The authentication policy, if any, to use when calling the OpenAPI operation." + }, + "output": { + "type": "string", + "enum": [ + "raw", + "content", + "response" + ], + "title": "WithOpenAPIOutput", + "description": "The http call output format. Defaults to 'content'." + } + }, + "required": [ + "document", + "operationId" + ], + "unevaluatedProperties": false + } + } + }, + { + "title": "CallFunction", + "description": "Defines the function call to perform.", + "$ref": "#/$defs/taskBase", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "call" + ], + "properties": { + "call": { + "type": "string", + "not": { + "enum": [ + "asyncapi", + "grpc", + "http", + "openapi" + ] + }, + "description": "The name of the function to call." 
+ }, + "with": { + "type": "object", + "title": "FunctionArguments", + "description": "A name/value mapping of the parameters, if any, to call the function with.", + "additionalProperties": true + } + } + } + ] + }, + "forkTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "ForkTask", + "description": "Allows workflows to execute multiple tasks concurrently and optionally race them against each other, with a single possible winner, which sets the task's output.", + "unevaluatedProperties": false, + "required": [ + "fork" + ], + "properties": { + "fork": { + "type": "object", + "title": "ForkTaskConfiguration", + "description": "The configuration of the branches to perform concurrently.", + "unevaluatedProperties": false, + "required": [ + "branches" + ], + "properties": { + "branches": { + "$ref": "#/$defs/taskList", + "title": "ForkBranches" + }, + "compete": { + "type": "boolean", + "title": "ForkCompete", + "description": "Indicates whether or not the concurrent tasks are racing against each other, with a single possible winner, which sets the composite task's output.", + "default": false + } + } + } + } + }, + "doTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "DoTask", + "description": "Allows to execute a list of tasks in sequence.", + "unevaluatedProperties": false, + "required": [ + "do" + ], + "properties": { + "do": { + "$ref": "#/$defs/taskList", + "title": "DoTaskConfiguration", + "description": "The configuration of the tasks to perform sequentially." + } + } + }, + "emitTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "EmitTask", + "description": "Allows workflows to publish events to event brokers or messaging systems, facilitating communication and coordination between different components and services.", + "required": [ + "emit" + ], + "unevaluatedProperties": false, + "properties": { + "emit": { + "type": "object", + "title": "EmitTaskConfiguration", + "description": "The configuration of an event's emission.", + "unevaluatedProperties": false, + "properties": { + "event": { + "type": "object", + "title": "EmitEventDefinition", + "description": "The definition of the event to emit.", + "properties": { + "with": { + "$ref": "#/$defs/eventProperties", + "title": "EmitEventWith", + "description": "Defines the properties of event to emit.", + "required": [ + "source", + "type" + ] + } + }, + "additionalProperties": true + } + }, + "required": [ + "event" + ] + } + } + }, + "forTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "ForTask", + "description": "Allows workflows to iterate over a collection of items, executing a defined set of subtasks for each item in the collection. This task type is instrumental in handling scenarios such as batch processing, data transformation, and repetitive operations across datasets.", + "required": [ + "for", + "do" + ], + "unevaluatedProperties": false, + "properties": { + "for": { + "type": "object", + "title": "ForTaskConfiguration", + "description": "The definition of the loop that iterates over a range of values.", + "unevaluatedProperties": false, + "properties": { + "each": { + "type": "string", + "title": "ForEach", + "description": "The name of the variable used to store the current item being enumerated.", + "default": "item" + }, + "in": { + "type": "string", + "title": "ForIn", + "description": "A runtime expression used to get the collection to enumerate." 
+ }, + "at": { + "type": "string", + "title": "ForAt", + "description": "The name of the variable used to store the index of the current item being enumerated.", + "default": "index" + } + }, + "required": [ + "in" + ] + }, + "while": { + "type": "string", + "title": "While", + "description": "A runtime expression that represents the condition, if any, that must be met for the iteration to continue." + }, + "do": { + "$ref": "#/$defs/taskList", + "title": "ForTaskDo" + } + } + }, + "listenTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "ListenTask", + "description": "Provides a mechanism for workflows to await and react to external events, enabling event-driven behavior within workflow systems.", + "required": [ + "listen" + ], + "unevaluatedProperties": false, + "properties": { + "listen": { + "type": "object", + "title": "ListenTaskConfiguration", + "description": "The configuration of the listener to use.", + "unevaluatedProperties": false, + "properties": { + "to": { + "$ref": "#/$defs/eventConsumptionStrategy", + "title": "ListenTo", + "description": "Defines the event(s) to listen to." + } + }, + "required": [ + "to" + ] + } + } + }, + "raiseTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "RaiseTask", + "description": "Intentionally triggers and propagates errors.", + "required": [ + "raise" + ], + "unevaluatedProperties": false, + "properties": { + "raise": { + "type": "object", + "title": "RaiseTaskConfiguration", + "description": "The definition of the error to raise.", + "unevaluatedProperties": false, + "properties": { + "error": { + "oneOf": [ + { + "$ref": "#/$defs/error", + "title": "RaiseErrorDefinition", + "description": "Defines the error to raise." + }, + { + "type": "string", + "title": "RaiseErrorReference", + "description": "The name of the error to raise" + } + ] + } + }, + "required": [ + "error" + ] + } + } + }, + "runTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "RunTask", + "description": "Provides the capability to execute external containers, shell commands, scripts, or workflows.", + "required": [ + "run" + ], + "unevaluatedProperties": false, + "properties": { + "run": { + "type": "object", + "title": "RunTaskConfiguration", + "description": "The configuration of the process to execute.", + "unevaluatedProperties": false, + "properties": { + "await": { + "type": "boolean", + "default": true, + "title": "AwaitProcessCompletion", + "description": "Whether to await the process completion before continuing." + } + }, + "oneOf": [ + { + "title": "RunContainer", + "description": "Enables the execution of external processes encapsulated within a containerized environment.", + "properties": { + "container": { + "type": "object", + "title": "Container", + "description": "The configuration of the container to run.", + "unevaluatedProperties": false, + "properties": { + "image": { + "type": "string", + "title": "ContainerImage", + "description": "The name of the container image to run." + }, + "command": { + "type": "string", + "title": "ContainerCommand", + "description": "The command, if any, to execute on the container." + }, + "ports": { + "type": "object", + "title": "ContainerPorts", + "description": "The container's port mappings, if any." + }, + "volumes": { + "type": "object", + "title": "ContainerVolumes", + "description": "The container's volume mappings, if any." 
+ }, + "environment": { + "type": "object", + "title": "ContainerEnvironment", + "description": "A key/value mapping of the environment variables, if any, to use when running the configured process." + } + }, + "required": [ + "image" + ] + } + }, + "required": [ + "container" + ] + }, + { + "title": "RunScript", + "description": "Enables the execution of custom scripts or code within a workflow, empowering workflows to perform specialized logic, data processing, or integration tasks by executing user-defined scripts written in various programming languages.", + "properties": { + "script": { + "type": "object", + "title": "Script", + "description": "The configuration of the script to run.", + "unevaluatedProperties": false, + "properties": { + "language": { + "type": "string", + "title": "ScriptLanguage", + "description": "The language of the script to run." + }, + "arguments": { + "type": "object", + "title": "ScriptArguments", + "description": "A key/value mapping of the arguments, if any, to use when running the configured script.", + "additionalProperties": true + }, + "environment": { + "type": "object", + "title": "ScriptEnvironment", + "description": "A key/value mapping of the environment variables, if any, to use when running the configured script process.", + "additionalProperties": true + } + }, + "oneOf": [ + { + "title": "InlineScript", + "type": "object", + "description": "The script's code.", + "properties": { + "code": { + "type": "string", + "title": "InlineScriptCode" + } + }, + "required": [ + "code" + ] + }, + { + "title": "ExternalScript", + "type": "object", + "description": "The script's resource.", + "properties": { + "source": { + "$ref": "#/$defs/externalResource", + "title": "ExternalScriptResource" + } + }, + "required": [ + "source" + ] + } + ], + "required": [ + "language" + ] + } + }, + "required": [ + "script" + ] + }, + { + "title": "RunShell", + "description": "Enables the execution of shell commands within a workflow, enabling workflows to interact with the underlying operating system and perform system-level operations, such as file manipulation, environment configuration, or system administration tasks.", + "properties": { + "shell": { + "type": "object", + "title": "Shell", + "description": "The configuration of the shell command to run.", + "unevaluatedProperties": false, + "properties": { + "command": { + "type": "string", + "title": "ShellCommand", + "description": "The shell command to run." 
+ }, + "arguments": { + "type": "object", + "title": "ShellArguments", + "description": "A list of the arguments of the shell command to run.", + "additionalProperties": true + }, + "environment": { + "type": "object", + "title": "ShellEnvironment", + "description": "A key/value mapping of the environment variables, if any, to use when running the configured process.", + "additionalProperties": true + } + }, + "required": [ + "command" + ] + } + }, + "required": [ + "shell" + ] + }, + { + "title": "RunWorkflow", + "description": "Enables the invocation and execution of nested workflows within a parent workflow, facilitating modularization, reusability, and abstraction of complex logic or business processes by encapsulating them into standalone workflow units.", + "properties": { + "workflow": { + "type": "object", + "title": "SubflowConfiguration", + "description": "The configuration of the workflow to run.", + "unevaluatedProperties": false, + "properties": { + "namespace": { + "type": "string", + "title": "SubflowNamespace", + "description": "The namespace the workflow to run belongs to." + }, + "name": { + "type": "string", + "title": "SubflowName", + "description": "The name of the workflow to run." + }, + "version": { + "type": "string", + "default": "latest", + "title": "SubflowVersion", + "description": "The version of the workflow to run. Defaults to latest." + }, + "input": { + "type": "object", + "title": "SubflowInput", + "description": "The data, if any, to pass as input to the workflow to execute. The value should be validated against the target workflow's input schema, if specified.", + "additionalProperties": true + } + }, + "required": [ + "namespace", + "name", + "version" + ] + } + }, + "required": [ + "workflow" + ] + } + ] + } + } + }, + "setTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "SetTask", + "description": "A task used to set data.", + "required": [ + "set" + ], + "unevaluatedProperties": false, + "properties": { + "set": { + "type": "object", + "title": "SetTaskConfiguration", + "description": "The data to set.", + "minProperties": 1, + "additionalProperties": true + } + } + }, + "switchTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "SwitchTask", + "description": "Enables conditional branching within workflows, allowing them to dynamically select different paths based on specified conditions or criteria.", + "required": [ + "switch" + ], + "unevaluatedProperties": false, + "properties": { + "switch": { + "type": "array", + "title": "SwitchTaskConfiguration", + "description": "The definition of the switch to use.", + "minItems": 1, + "items": { + "type": "object", + "title": "SwitchItem", + "minProperties": 1, + "maxProperties": 1, + "additionalProperties": { + "type": "object", + "title": "SwitchCase", + "description": "The definition of a case within a switch task, defining a condition and corresponding tasks to execute if the condition is met.", + "unevaluatedProperties": false, + "required": [ + "then" + ], + "properties": { + "when": { + "type": "string", + "title": "SwitchCaseCondition", + "description": "A runtime expression used to determine whether or not the case matches." + }, + "then": { + "$ref": "#/$defs/flowDirective", + "title": "SwitchCaseOutcome", + "description": "The flow directive to execute when the case matches." 
+ } + } + } + } + } + } + }, + "tryTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "TryTask", + "description": "Serves as a mechanism within workflows to handle errors gracefully, potentially retrying failed tasks before proceeding with alternate ones.", + "required": [ + "try", + "catch" + ], + "unevaluatedProperties": false, + "properties": { + "try": { + "$ref": "#/$defs/taskList", + "title": "TryTaskConfiguration", + "description": "The task(s) to perform." + }, + "catch": { + "type": "object", + "title": "TryTaskCatch", + "description": "The object used to define the errors to catch.", + "unevaluatedProperties": false, + "properties": { + "errors": { + "type": "object", + "title": "CatchErrors", + "description": "The configuration of a concept used to catch errors." + }, + "as": { + "type": "string", + "title": "CatchAs", + "description": "The name of the runtime expression variable to save the error as. Defaults to 'error'." + }, + "when": { + "type": "string", + "title": "CatchWhen", + "description": "A runtime expression used to determine whether or not to catch the filtered error." + }, + "exceptWhen": { + "type": "string", + "title": "CatchExceptWhen", + "description": "A runtime expression used to determine whether or not to catch the filtered error." + }, + "retry": { + "oneOf": [ + { + "$ref": "#/$defs/retryPolicy", + "title": "RetryPolicyDefinition", + "description": "The retry policy to use, if any, when catching errors." + }, + { + "type": "string", + "title": "RetryPolicyReference", + "description": "The name of the retry policy to use, if any, when catching errors." + } + ] + }, + "do": { + "$ref": "#/$defs/taskList", + "title": "TryTaskCatchDo", + "description": "The definition of the task(s) to run when catching an error." + } + } + } + } + }, + "waitTask": { + "type": "object", + "$ref": "#/$defs/taskBase", + "title": "WaitTask", + "description": "Allows workflows to pause or delay their execution for a specified period of time.", + "required": [ + "wait" + ], + "unevaluatedProperties": false, + "properties": { + "wait": { + "$ref": "#/$defs/duration", + "title": "WaitTaskConfiguration", + "description": "The amount of time to wait." + } + } + }, + "flowDirective": { + "title": "FlowDirective", + "description": "Represents different transition options for a workflow.", + "anyOf": [ + { + "type": "string", + "enum": [ + "continue", + "exit", + "end" + ], + "default": "continue" + }, + { + "type": "string" + } + ] + }, + "referenceableAuthenticationPolicy": { + "type": "object", + "title": "ReferenceableAuthenticationPolicy", + "description": "Represents a referenceable authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "AuthenticationPolicyReference", + "description": "The reference of the authentication policy to use.", + "properties": { + "use": { + "type": "string", + "minLength": 1, + "title": "ReferenceableAuthenticationPolicyName", + "description": "The name of the authentication policy to use." + } + }, + "required": [ + "use" + ] + }, + { + "$ref": "#/$defs/authenticationPolicy" + } + ] + }, + "secretBasedAuthenticationPolicy": { + "type": "object", + "title": "SecretBasedAuthenticationPolicy", + "description": "Represents an authentication policy based on secrets.", + "unevaluatedProperties": false, + "properties": { + "use": { + "type": "string", + "minLength": 1, + "title": "SecretBasedAuthenticationPolicyName", + "description": "The name of the authentication policy to use." 
+ } + }, + "required": [ + "use" + ] + }, + "authenticationPolicy": { + "type": "object", + "title": "AuthenticationPolicy", + "description": "Defines an authentication policy.", + "oneOf": [ + { + "title": "BasicAuthenticationPolicy", + "description": "Use basic authentication.", + "properties": { + "basic": { + "type": "object", + "title": "BasicAuthenticationPolicyConfiguration", + "description": "The configuration of the basic authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "BasicAuthenticationProperties", + "description": "Inline configuration of the basic authentication policy.", + "properties": { + "username": { + "type": "string", + "description": "The username to use." + }, + "password": { + "type": "string", + "description": "The password to use." + } + }, + "required": [ + "username", + "password" + ] + }, + { + "$ref": "#/$defs/secretBasedAuthenticationPolicy", + "title": "BasicAuthenticationPolicySecret", + "description": "Secret based configuration of the basic authentication policy." + } + ] + } + }, + "required": [ + "basic" + ] + }, + { + "title": "BearerAuthenticationPolicy", + "description": "Use bearer authentication.", + "properties": { + "bearer": { + "type": "object", + "title": "BearerAuthenticationPolicyConfiguration", + "description": "The configuration of the bearer authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "BearerAuthenticationProperties", + "description": "Inline configuration of the bearer authentication policy.", + "properties": { + "token": { + "type": "string", + "description": "The bearer token to use." + } + }, + "required": [ + "token" + ] + }, + { + "$ref": "#/$defs/secretBasedAuthenticationPolicy", + "title": "BearerAuthenticationPolicySecret", + "description": "Secret based configuration of the bearer authentication policy." + } + ] + } + }, + "required": [ + "bearer" + ] + }, + { + "title": "DigestAuthenticationPolicy", + "description": "Use digest authentication.", + "properties": { + "digest": { + "type": "object", + "title": "DigestAuthenticationPolicyConfiguration", + "description": "The configuration of the digest authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "DigestAuthenticationProperties", + "description": "Inline configuration of the digest authentication policy.", + "properties": { + "username": { + "type": "string", + "description": "The username to use." + }, + "password": { + "type": "string", + "description": "The password to use." + } + }, + "required": [ + "username", + "password" + ] + }, + { + "$ref": "#/$defs/secretBasedAuthenticationPolicy", + "title": "DigestAuthenticationPolicySecret", + "description": "Secret based configuration of the digest authentication policy." 
+ } + ] + } + }, + "required": [ + "digest" + ] + }, + { + "title": "OAuth2AuthenticationPolicy", + "description": "Use OAuth2 authentication.", + "properties": { + "oauth2": { + "type": "object", + "title": "OAuth2AuthenticationPolicyConfiguration", + "description": "The configuration of the OAuth2 authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "type": "object", + "title": "OAuth2ConnectAuthenticationProperties", + "description": "The inline configuration of the OAuth2 authentication policy.", + "unevaluatedProperties": false, + "allOf": [ + { + "$ref": "#/$defs/oauth2AuthenticationProperties" + }, + { + "type": "object", + "properties": { + "endpoints": { + "type": "object", + "title": "OAuth2AuthenticationPropertiesEndpoints", + "description": "The endpoint configurations for OAuth2.", + "properties": { + "token": { + "type": "string", + "format": "uri-template", + "default": "/oauth2/token", + "title": "OAuth2TokenEndpoint", + "description": "The relative path to the token endpoint. Defaults to `/oauth2/token`." + }, + "revocation": { + "type": "string", + "format": "uri-template", + "default": "/oauth2/revoke", + "title": "OAuth2RevocationEndpoint", + "description": "The relative path to the revocation endpoint. Defaults to `/oauth2/revoke`." + }, + "introspection": { + "type": "string", + "format": "uri-template", + "default": "/oauth2/introspect", + "title": "OAuth2IntrospectionEndpoint", + "description": "The relative path to the introspection endpoint. Defaults to `/oauth2/introspect`." + } + } + } + } + } + ] + }, + { + "$ref": "#/$defs/secretBasedAuthenticationPolicy", + "title": "OAuth2AuthenticationPolicySecret", + "description": "Secret based configuration of the OAuth2 authentication policy." + } + ] + } + }, + "required": [ + "oauth2" + ] + }, + { + "title": "OpenIdConnectAuthenticationPolicy", + "description": "Use OpenIdConnect authentication.", + "properties": { + "oidc": { + "type": "object", + "title": "OpenIdConnectAuthenticationPolicyConfiguration", + "description": "The configuration of the OpenIdConnect authentication policy.", + "unevaluatedProperties": false, + "oneOf": [ + { + "$ref": "#/$defs/oauth2AuthenticationProperties", + "title": "OpenIdConnectAuthenticationProperties", + "description": "The inline configuration of the OpenIdConnect authentication policy.", + "unevaluatedProperties": false + }, + { + "$ref": "#/$defs/secretBasedAuthenticationPolicy", + "title": "OpenIdConnectAuthenticationPolicySecret", + "description": "Secret based configuration of the OpenIdConnect authentication policy." + } + ] + } + }, + "required": [ + "oidc" + ] + } + ] + }, + "oauth2AuthenticationProperties": { + "type": "object", + "title": "OAuth2AutenthicationData", + "description": "Inline configuration of the OAuth2 authentication policy.", + "properties": { + "authority": { + "type": "string", + "format": "uri-template", + "title": "OAuth2AutenthicationDataAuthority", + "description": "The URI that references the OAuth2 authority to use." + }, + "grant": { + "type": "string", + "enum": [ + "authorization_code", + "client_credentials", + "password", + "refresh_token", + "urn:ietf:params:oauth:grant-type:token-exchange" + ], + "title": "OAuth2AutenthicationDataGrant", + "description": "The grant type to use." 
+ }, + "client": { + "type": "object", + "title": "OAuth2AutenthicationDataClient", + "description": "The definition of an OAuth2 client.", + "unevaluatedProperties": false, + "properties": { + "id": { + "type": "string", + "title": "ClientId", + "description": "The client id to use." + }, + "secret": { + "type": "string", + "title": "ClientSecret", + "description": "The client secret to use, if any." + }, + "assertion": { + "type": "string", + "title": "ClientAssertion", + "description": "A JWT containing a signed assertion with your application credentials." + }, + "authentication": { + "type": "string", + "enum": [ + "client_secret_basic", + "client_secret_post", + "client_secret_jwt", + "private_key_jwt", + "none" + ], + "default": "client_secret_post", + "title": "ClientAuthentication", + "description": "The authentication method to use to authenticate the client." + } + } + }, + "request": { + "type": "object", + "title": "OAuth2TokenRequest", + "description": "The configuration of an OAuth2 token request", + "properties": { + "encoding": { + "type": "string", + "enum": [ + "application/x-www-form-urlencoded", + "application/json" + ], + "default": "application/x-www-form-urlencoded", + "title": "Oauth2TokenRequestEncoding" + } + } + }, + "issuers": { + "type": "array", + "title": "OAuth2Issuers", + "description": "A list that contains that contains valid issuers that will be used to check against the issuer of generated tokens.", + "items": { + "type": "string" + } + }, + "scopes": { + "type": "array", + "title": "OAuth2AutenthicationDataScopes", + "description": "The scopes, if any, to request the token for.", + "items": { + "type": "string" + } + }, + "audiences": { + "type": "array", + "title": "OAuth2AutenthicationDataAudiences", + "description": "The audiences, if any, to request the token for.", + "items": { + "type": "string" + } + }, + "username": { + "type": "string", + "title": "OAuth2AutenthicationDataUsername", + "description": "The username to use. Used only if the grant type is Password." + }, + "password": { + "type": "string", + "title": "OAuth2AutenthicationDataPassword", + "description": "The password to use. Used only if the grant type is Password." + }, + "subject": { + "$ref": "#/$defs/oauth2Token", + "title": "OAuth2AutenthicationDataSubject", + "description": "The security token that represents the identity of the party on behalf of whom the request is being made." + }, + "actor": { + "$ref": "#/$defs/oauth2Token", + "title": "OAuth2AutenthicationDataActor", + "description": "The security token that represents the identity of the acting party." + } + } + }, + "oauth2Token": { + "type": "object", + "title": "OAuth2TokenDefinition", + "description": "Represents an OAuth2 token.", + "unevaluatedProperties": false, + "properties": { + "token": { + "type": "string", + "title": "OAuth2Token", + "description": "The security token to use." + }, + "type": { + "type": "string", + "title": "OAuth2TokenType", + "description": "The type of the security token to use." + } + }, + "required": [ + "token", + "type" + ] + }, + "duration": { + "oneOf": [ + { + "type": "object", + "minProperties": 1, + "unevaluatedProperties": false, + "properties": { + "days": { + "type": "integer", + "title": "DurationDays", + "description": "Number of days, if any." + }, + "hours": { + "type": "integer", + "title": "DurationHours", + "description": "Number of days, if any." + }, + "minutes": { + "type": "integer", + "title": "DurationMinutes", + "description": "Number of minutes, if any." 
+ }, + "seconds": { + "type": "integer", + "title": "DurationSeconds", + "description": "Number of seconds, if any." + }, + "milliseconds": { + "type": "integer", + "title": "DurationMilliseconds", + "description": "Number of milliseconds, if any." + } + }, + "title": "DurationInline", + "description": "The inline definition of a duration." + }, + { + "type": "string", + "pattern": "^P(?!$)(\\d+(?:\\.\\d+)?Y)?(\\d+(?:\\.\\d+)?M)?(\\d+(?:\\.\\d+)?W)?(\\d+(?:\\.\\d+)?D)?(T(?=\\d)(\\d+(?:\\.\\d+)?H)?(\\d+(?:\\.\\d+)?M)?(\\d+(?:\\.\\d+)?S)?)?$", + "title": "DurationExpression", + "description": "The ISO 8601 expression of a duration." + } + ] + }, + "error": { + "type": "object", + "title": "Error", + "description": "Represents an error.", + "unevaluatedProperties": false, + "properties": { + "type": { + "title": "ErrorType", + "description": "A URI reference that identifies the error type.", + "oneOf": [ + { + "title": "LiteralErrorType", + "description": "The literal error type.", + "type": "string", + "format": "uri-template" + }, + { + "$ref": "#/$defs/runtimeExpression", + "title": "ExpressionErrorType", + "description": "An expression based error type." + } + ] + }, + "status": { + "type": "integer", + "title": "ErrorStatus", + "description": "The status code generated by the origin for this occurrence of the error." + }, + "instance": { + "title": "ErrorInstance", + "description": "A JSON Pointer used to reference the component the error originates from.", + "oneOf": [ + { + "title": "LiteralErrorInstance", + "description": "The literal error instance.", + "type": "string", + "format": "json-pointer" + }, + { + "$ref": "#/$defs/runtimeExpression", + "title": "ExpressionErrorInstance", + "description": "An expression based error instance." + } + ] + }, + "title": { + "type": "string", + "title": "ErrorTitle", + "description": "A short, human-readable summary of the error." + }, + "detail": { + "type": "string", + "title": "ErrorDetails", + "description": "A human-readable explanation specific to this occurrence of the error." + } + }, + "required": [ + "type", + "status" + ] + }, + "endpoint": { + "title": "Endpoint", + "description": "Represents an endpoint.", + "oneOf": [ + { + "$ref": "#/$defs/runtimeExpression" + }, + { + "title": "LiteralEndpoint", + "type": "string", + "format": "uri-template" + }, + { + "title": "EndpointConfiguration", + "type": "object", + "unevaluatedProperties": false, + "properties": { + "uri": { + "title": "EndpointUri", + "description": "The endpoint's URI.", + "oneOf": [ + { + "title": "LiteralEndpointURI", + "description": "The literal endpoint's URI.", + "type": "string", + "format": "uri-template" + }, + { + "$ref": "#/$defs/runtimeExpression", + "title": "ExpressionEndpointURI", + "description": "An expression based endpoint's URI." + } + ] + }, + "authentication": { + "$ref": "#/$defs/referenceableAuthenticationPolicy", + "title": "EndpointAuthentication", + "description": "The authentication policy to use." + } + }, + "required": [ + "uri" + ] + } + ] + }, + "eventProperties": { + "type": "object", + "title": "EventProperties", + "description": "Describes the properties of an event.", + "properties": { + "id": { + "type": "string", + "title": "EventId", + "description": "The event's unique identifier." 
+ }, + "source": { + "title": "EventSource", + "description": "Identifies the context in which an event happened.", + "oneOf": [ + { + "title": "LiteralSource", + "type": "string", + "format": "uri-template" + }, + { + "$ref": "#/$defs/runtimeExpression" + } + ] + }, + "type": { + "type": "string", + "title": "EventType", + "description": "This attribute contains a value describing the type of event related to the originating occurrence." + }, + "time": { + "title": "EventTime", + "description": "When the event occured.", + "oneOf": [ + { + "title": "LiteralTime", + "type": "string", + "format": "date-time" + }, + { + "$ref": "#/$defs/runtimeExpression" + } + ] + }, + "subject": { + "type": "string", + "title": "EventSubject", + "description": "The subject of the event." + }, + "datacontenttype": { + "type": "string", + "title": "EventDataContentType", + "description": "Content type of data value. This attribute enables data to carry any type of content, whereby format and encoding might differ from that of the chosen event format." + }, + "dataschema": { + "title": "EventDataschema", + "description": "The schema describing the event format.", + "oneOf": [ + { + "title": "LiteralDataSchema", + "description": "The literal event data schema.", + "type": "string", + "format": "uri-template" + }, + { + "$ref": "#/$defs/runtimeExpression", + "title": "ExpressionDataSchema", + "description": "An expression based event data schema." + } + ] + } + }, + "additionalProperties": true + }, + "eventConsumptionStrategy": { + "type": "object", + "title": "EventConsumptionStrategy", + "description": "Describe the event consumption strategy to adopt.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "AllEventConsumptionStrategy", + "properties": { + "all": { + "type": "array", + "title": "AllEventConsumptionStrategyConfiguration", + "description": "A list containing all the events that must be consumed.", + "items": { + "$ref": "#/$defs/eventFilter" + } + } + }, + "required": [ + "all" + ] + }, + { + "title": "AnyEventConsumptionStrategy", + "properties": { + "any": { + "type": "array", + "title": "AnyEventConsumptionStrategyConfiguration", + "description": "A list containing any of the events to consume.", + "items": { + "$ref": "#/$defs/eventFilter" + } + } + }, + "required": [ + "any" + ] + }, + { + "title": "OneEventConsumptionStrategy", + "properties": { + "one": { + "$ref": "#/$defs/eventFilter", + "title": "OneEventConsumptionStrategyConfiguration", + "description": "The single event to consume." + } + }, + "required": [ + "one" + ] + } + ] + }, + "eventFilter": { + "type": "object", + "title": "EventFilter", + "description": "An event filter is a mechanism used to selectively process or handle events based on predefined criteria, such as event type, source, or specific attributes.", + "unevaluatedProperties": false, + "properties": { + "with": { + "$ref": "#/$defs/eventProperties", + "minProperties": 1, + "title": "WithEvent", + "description": "An event filter is a mechanism used to selectively process or handle events based on predefined criteria, such as event type, source, or specific attributes." 
+ }, + "correlate": { + "type": "object", + "title": "EventFilterCorrelate", + "description": "A correlation is a link between events and data, established by mapping event attributes to specific data attributes, allowing for coordinated processing or handling based on event characteristics.", + "additionalProperties": { + "type": "object", + "properties": { + "from": { + "type": "string", + "title": "CorrelateFrom", + "description": "A runtime expression used to extract the correlation value from the filtered event." + }, + "expect": { + "type": "string", + "title": "CorrelateExpect", + "description": "A constant or a runtime expression, if any, used to determine whether or not the extracted correlation value matches expectations. If not set, the first extracted value will be used as the correlation's expectation." + } + }, + "required": [ + "from" + ] + } + } + }, + "required": [ + "with" + ] + }, + "extension": { + "type": "object", + "title": "Extension", + "description": "The definition of an extension.", + "unevaluatedProperties": false, + "properties": { + "extend": { + "type": "string", + "enum": [ + "call", + "composite", + "emit", + "for", + "listen", + "raise", + "run", + "set", + "switch", + "try", + "wait", + "all" + ], + "title": "ExtensionTarget", + "description": "The type of task to extend." + }, + "when": { + "type": "string", + "title": "ExtensionCondition", + "description": "A runtime expression, if any, used to determine whether or not the extension should apply in the specified context." + }, + "before": { + "$ref": "#/$defs/taskList", + "title": "ExtensionDoBefore", + "description": "The task(s) to execute before the extended task, if any." + }, + "after": { + "$ref": "#/$defs/taskList", + "title": "ExtensionDoAfter", + "description": "The task(s) to execute after the extended task, if any." + } + }, + "required": [ + "extend" + ] + }, + "externalResource": { + "type": "object", + "title": "ExternalResource", + "description": "Represents an external resource.", + "unevaluatedProperties": false, + "properties": { + "name": { + "type": "string", + "title": "ExternalResourceName", + "description": "The name of the external resource, if any." + }, + "endpoint": { + "$ref": "#/$defs/endpoint", + "title": "ExternalResourceEndpoint", + "description": "The endpoint of the external resource." + } + }, + "required": [ + "endpoint" + ] + }, + "input": { + "type": "object", + "title": "Input", + "description": "Configures the input of a workflow or task.", + "unevaluatedProperties": false, + "properties": { + "schema": { + "$ref": "#/$defs/schema", + "title": "InputSchema", + "description": "The schema used to describe and validate the input of the workflow or task." + }, + "from": { + "title": "InputFrom", + "description": "A runtime expression, if any, used to mutate and/or filter the input of the workflow or task.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] + } + } + }, + "output": { + "type": "object", + "title": "Output", + "description": "Configures the output of a workflow or task.", + "unevaluatedProperties": false, + "properties": { + "schema": { + "$ref": "#/$defs/schema", + "title": "OutputSchema", + "description": "The schema used to describe and validate the output of the workflow or task." 
+ }, + "as": { + "title": "OutputAs", + "description": "A runtime expression, if any, used to mutate and/or filter the output of the workflow or task.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] + } + } + }, + "export": { + "type": "object", + "title": "Export", + "description": "Set the content of the context. .", + "unevaluatedProperties": false, + "properties": { + "schema": { + "$ref": "#/$defs/schema", + "title": "ExportSchema", + "description": "The schema used to describe and validate the workflow context." + }, + "as": { + "title": "ExportAs", + "description": "A runtime expression, if any, used to export the output data to the context.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] + } + } + }, + "retryPolicy": { + "type": "object", + "title": "RetryPolicy", + "description": "Defines a retry policy.", + "unevaluatedProperties": false, + "properties": { + "when": { + "type": "string", + "title": "RetryWhen", + "description": "A runtime expression, if any, used to determine whether or not to retry running the task, in a given context." + }, + "exceptWhen": { + "type": "string", + "title": "RetryExcepWhen", + "description": "A runtime expression used to determine whether or not to retry running the task, in a given context." + }, + "delay": { + "$ref": "#/$defs/duration", + "title": "RetryDelay", + "description": "The duration to wait between retry attempts." + }, + "backoff": { + "type": "object", + "title": "RetryBackoff", + "description": "The retry duration backoff.", + "unevaluatedProperties": false, + "oneOf": [ + { + "title": "ConstantBackoff", + "properties": { + "constant": { + "type": "object", + "description": "The definition of the constant backoff to use, if any." + } + }, + "required": [ + "constant" + ] + }, + { + "title": "ExponentialBackOff", + "properties": { + "exponential": { + "type": "object", + "description": "The definition of the exponential backoff to use, if any." + } + }, + "required": [ + "exponential" + ] + }, + { + "title": "LinearBackoff", + "properties": { + "linear": { + "type": "object", + "description": "The definition of the linear backoff to use, if any." + } + }, + "required": [ + "linear" + ] + } + ] + }, + "limit": { + "type": "object", + "title": "RetryLimit", + "unevaluatedProperties": false, + "properties": { + "attempt": { + "type": "object", + "title": "RetryLimitAttempt", + "unevaluatedProperties": false, + "properties": { + "count": { + "type": "integer", + "title": "RetryLimitAttemptCount", + "description": "The maximum amount of retry attempts, if any." + }, + "duration": { + "$ref": "#/$defs/duration", + "title": "RetryLimitAttemptDuration", + "description": "The maximum duration for each retry attempt." + } + } + }, + "duration": { + "$ref": "#/$defs/duration", + "title": "RetryLimitDuration", + "description": "The duration limit, if any, for all retry attempts." + } + }, + "description": "The retry limit, if any." + }, + "jitter": { + "type": "object", + "title": "RetryPolicyJitter", + "description": "The parameters, if any, that control the randomness or variability of the delay between retry attempts.", + "unevaluatedProperties": false, + "properties": { + "from": { + "$ref": "#/$defs/duration", + "title": "RetryPolicyJitterFrom", + "description": "The minimum duration of the jitter range." + }, + "to": { + "$ref": "#/$defs/duration", + "title": "RetryPolicyJitterTo", + "description": "The maximum duration of the jitter range." 
+ } + }, + "required": [ + "from", + "to" + ] + } + } + }, + "schema": { + "type": "object", + "title": "Schema", + "description": "Represents the definition of a schema.", + "unevaluatedProperties": false, + "properties": { + "format": { + "type": "string", + "default": "json", + "title": "SchemaFormat", + "description": "The schema's format. Defaults to 'json'. The (optional) version of the format can be set using `{format}:{version}`." + } + }, + "oneOf": [ + { + "title": "SchemaInline", + "properties": { + "document": { + "description": "The schema's inline definition." + } + }, + "required": [ + "document" + ] + }, + { + "title": "SchemaExternal", + "properties": { + "resource": { + "$ref": "#/$defs/externalResource", + "title": "SchemaExternalResource", + "description": "The schema's external resource." + } + }, + "required": [ + "resource" + ] + } + ] + }, + "timeout": { + "type": "object", + "title": "Timeout", + "description": "The definition of a timeout.", + "unevaluatedProperties": false, + "properties": { + "after": { + "$ref": "#/$defs/duration", + "title": "TimeoutAfter", + "description": "The duration after which to timeout." + } + }, + "required": [ + "after" + ] + }, + "runtimeExpression": { + "type": "string", + "title": "RuntimeExpression", + "description": "A runtime expression.", + "pattern": "^\\s*\\$\\{.+\\}\\s*$" + } + } +} diff --git a/kubernetes/spec/spec.go b/kubernetes/spec/spec.go new file mode 100644 index 0000000..990f48d --- /dev/null +++ b/kubernetes/spec/spec.go @@ -0,0 +1,893 @@ +// Code generated by github.com/atombender/go-jsonschema, DO NOT EDIT. + +package spec + +import "encoding/json" +import "fmt" +import "reflect" + +// Defines an authentication policy. +type AuthenticationPolicy map[string]interface{} + +// Defines the call to perform. +type CallTask interface{} + +type Duration interface{} + +// Represents an endpoint. +type Endpoint interface{} + +// Represents an error. +type Error struct { + // A human-readable explanation specific to this occurrence of the error. + Detail *string `json:"detail,omitempty"` + + // A JSON Pointer used to reference the component the error originates from. + Instance interface{} `json:"instance,omitempty"` + + // The status code generated by the origin for this occurrence of the error. + Status int `json:"status"` + + // A short, human-readable summary of the error. + Title *string `json:"title,omitempty"` + + // A URI reference that identifies the error type. + Type interface{} `json:"type"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Error) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["status"]; raw != nil && !ok { + return fmt.Errorf("field status in Error: required") + } + if _, ok := raw["type"]; raw != nil && !ok { + return fmt.Errorf("field type in Error: required") + } + type Plain Error + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Error(plain) + return nil +} + +// Describe the event consumption strategy to adopt. +type EventConsumptionStrategy map[string]interface{} + +// An event filter is a mechanism used to selectively process or handle events +// based on predefined criteria, such as event type, source, or specific +// attributes. 
+type EventFilter struct { + // A correlation is a link between events and data, established by mapping event + // attributes to specific data attributes, allowing for coordinated processing or + // handling based on event characteristics. + Correlate EventFilterCorrelate `json:"correlate,omitempty"` + + // An event filter is a mechanism used to selectively process or handle events + // based on predefined criteria, such as event type, source, or specific + // attributes. + With EventProperties `json:"with"` +} + +// A correlation is a link between events and data, established by mapping event +// attributes to specific data attributes, allowing for coordinated processing or +// handling based on event characteristics. +type EventFilterCorrelate map[string]struct { + // A constant or a runtime expression, if any, used to determine whether or not + // the extracted correlation value matches expectations. If not set, the first + // extracted value will be used as the correlation's expectation. + Expect *string `json:"expect,omitempty"` + + // A runtime expression used to extract the correlation value from the filtered + // event. + From string `json:"from"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *EventFilter) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["with"]; raw != nil && !ok { + return fmt.Errorf("field with in EventFilter: required") + } + type Plain EventFilter + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = EventFilter(plain) + return nil +} + +// Describes the properties of an event. +type EventProperties struct { + // Content type of data value. This attribute enables data to carry any type of + // content, whereby format and encoding might differ from that of the chosen event + // format. + Datacontenttype *string `json:"datacontenttype,omitempty"` + + // The schema describing the event format. + Dataschema interface{} `json:"dataschema,omitempty"` + + // The event's unique identifier. + Id *string `json:"id,omitempty"` + + // Identifies the context in which an event happened. + Source interface{} `json:"source,omitempty"` + + // The subject of the event. + Subject *string `json:"subject,omitempty"` + + // When the event occurred. + Time interface{} `json:"time,omitempty"` + + // This attribute contains a value describing the type of event related to the + // originating occurrence. + Type *string `json:"type,omitempty"` + + AdditionalProperties interface{} +} + +// Set the content of the context. +type Export struct { + // A runtime expression, if any, used to export the output data to the context. + As interface{} `json:"as,omitempty"` + + // The schema used to describe and validate the workflow context. + Schema *Schema `json:"schema,omitempty"` +} + +// The definition of an extension. +type Extension struct { + // The task(s) to execute after the extended task, if any. + After TaskList `json:"after,omitempty"` + + // The task(s) to execute before the extended task, if any. + Before TaskList `json:"before,omitempty"` + + // The type of task to extend. + Extend ExtensionExtend `json:"extend"` + + // A runtime expression, if any, used to determine whether or not the extension + // should apply in the specified context.
+ When *string `json:"when,omitempty"` +} + +type ExtensionExtend string + +const ExtensionExtendAll ExtensionExtend = "all" +const ExtensionExtendCall ExtensionExtend = "call" +const ExtensionExtendComposite ExtensionExtend = "composite" +const ExtensionExtendEmit ExtensionExtend = "emit" +const ExtensionExtendFor ExtensionExtend = "for" +const ExtensionExtendListen ExtensionExtend = "listen" +const ExtensionExtendRaise ExtensionExtend = "raise" +const ExtensionExtendRun ExtensionExtend = "run" +const ExtensionExtendSet ExtensionExtend = "set" +const ExtensionExtendSwitch ExtensionExtend = "switch" +const ExtensionExtendTry ExtensionExtend = "try" +const ExtensionExtendWait ExtensionExtend = "wait" + +var enumValues_ExtensionExtend = []interface{}{ + "call", + "composite", + "emit", + "for", + "listen", + "raise", + "run", + "set", + "switch", + "try", + "wait", + "all", +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *ExtensionExtend) UnmarshalJSON(b []byte) error { + var v string + if err := json.Unmarshal(b, &v); err != nil { + return err + } + var ok bool + for _, expected := range enumValues_ExtensionExtend { + if reflect.DeepEqual(v, expected) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_ExtensionExtend, v) + } + *j = ExtensionExtend(v) + return nil +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Extension) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["extend"]; raw != nil && !ok { + return fmt.Errorf("field extend in Extension: required") + } + type Plain Extension + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Extension(plain) + return nil +} + +// Represents an external resource. +type ExternalResource struct { + // The endpoint of the external resource. + Endpoint ExternalResourceEndpoint `json:"endpoint"` + + // The name of the external resource, if any. + Name *string `json:"name,omitempty"` +} + +// The endpoint of the external resource. +type ExternalResourceEndpoint interface{} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *ExternalResource) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["endpoint"]; raw != nil && !ok { + return fmt.Errorf("field endpoint in ExternalResource: required") + } + type Plain ExternalResource + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = ExternalResource(plain) + return nil +} + +// Represents different transition options for a workflow. +type FlowDirective interface{} + +// Configures the input of a workflow or task. +type Input struct { + // A runtime expression, if any, used to mutate and/or filter the input of the + // workflow or task. + From interface{} `json:"from,omitempty"` + + // The schema used to describe and validate the input of the workflow or task. + Schema *Schema `json:"schema,omitempty"` +} + +// Inline configuration of the OAuth2 authentication policy. +type Oauth2AuthenticationProperties struct { + // The security token that represents the identity of the acting party. + Actor *Oauth2Token `json:"actor,omitempty"` + + // The audiences, if any, to request the token for. + Audiences []string `json:"audiences,omitempty"` + + // The URI that references the OAuth2 authority to use. 
+ Authority *string `json:"authority,omitempty"` + + // The definition of an OAuth2 client. + Client *Oauth2AuthenticationPropertiesClient `json:"client,omitempty"` + + // The grant type to use. + Grant *Oauth2AuthenticationPropertiesGrant `json:"grant,omitempty"` + + // A list that contains valid issuers that will be used to check + // against the issuer of generated tokens. + Issuers []string `json:"issuers,omitempty"` + + // The password to use. Used only if the grant type is Password. + Password *string `json:"password,omitempty"` + + // The configuration of an OAuth2 token request + Request *Oauth2AuthenticationPropertiesRequest `json:"request,omitempty"` + + // The scopes, if any, to request the token for. + Scopes []string `json:"scopes,omitempty"` + + // The security token that represents the identity of the party on behalf of whom + // the request is being made. + Subject *Oauth2Token `json:"subject,omitempty"` + + // The username to use. Used only if the grant type is Password. + Username *string `json:"username,omitempty"` +} + +// The definition of an OAuth2 client. +type Oauth2AuthenticationPropertiesClient struct { + // A JWT containing a signed assertion with your application credentials. + Assertion *string `json:"assertion,omitempty"` + + // The authentication method to use to authenticate the client. + Authentication Oauth2AuthenticationPropertiesClientAuthentication `json:"authentication,omitempty"` + + // The client id to use. + Id *string `json:"id,omitempty"` + + // The client secret to use, if any. + Secret *string `json:"secret,omitempty"` +} + +type Oauth2AuthenticationPropertiesClientAuthentication string + +const Oauth2AuthenticationPropertiesClientAuthenticationClientSecretBasic Oauth2AuthenticationPropertiesClientAuthentication = "client_secret_basic" +const Oauth2AuthenticationPropertiesClientAuthenticationClientSecretJwt Oauth2AuthenticationPropertiesClientAuthentication = "client_secret_jwt" +const Oauth2AuthenticationPropertiesClientAuthenticationClientSecretPost Oauth2AuthenticationPropertiesClientAuthentication = "client_secret_post" +const Oauth2AuthenticationPropertiesClientAuthenticationNone Oauth2AuthenticationPropertiesClientAuthentication = "none" +const Oauth2AuthenticationPropertiesClientAuthenticationPrivateKeyJwt Oauth2AuthenticationPropertiesClientAuthentication = "private_key_jwt" + +var enumValues_Oauth2AuthenticationPropertiesClientAuthentication = []interface{}{ + "client_secret_basic", + "client_secret_post", + "client_secret_jwt", + "private_key_jwt", + "none", +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Oauth2AuthenticationPropertiesClientAuthentication) UnmarshalJSON(b []byte) error { + var v string + if err := json.Unmarshal(b, &v); err != nil { + return err + } + var ok bool + for _, expected := range enumValues_Oauth2AuthenticationPropertiesClientAuthentication { + if reflect.DeepEqual(v, expected) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_Oauth2AuthenticationPropertiesClientAuthentication, v) + } + *j = Oauth2AuthenticationPropertiesClientAuthentication(v) + return nil +} + +// UnmarshalJSON implements json.Unmarshaler.
+func (j *Oauth2AuthenticationPropertiesClient) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + type Plain Oauth2AuthenticationPropertiesClient + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + if v, ok := raw["authentication"]; !ok || v == nil { + plain.Authentication = "client_secret_post" + } + *j = Oauth2AuthenticationPropertiesClient(plain) + return nil +} + +type Oauth2AuthenticationPropertiesGrant string + +const Oauth2AuthenticationPropertiesGrantAuthorizationCode Oauth2AuthenticationPropertiesGrant = "authorization_code" +const Oauth2AuthenticationPropertiesGrantClientCredentials Oauth2AuthenticationPropertiesGrant = "client_credentials" +const Oauth2AuthenticationPropertiesGrantPassword Oauth2AuthenticationPropertiesGrant = "password" +const Oauth2AuthenticationPropertiesGrantRefreshToken Oauth2AuthenticationPropertiesGrant = "refresh_token" +const Oauth2AuthenticationPropertiesGrantUrnIetfParamsOauthGrantTypeTokenExchange Oauth2AuthenticationPropertiesGrant = "urn:ietf:params:oauth:grant-type:token-exchange" + +var enumValues_Oauth2AuthenticationPropertiesGrant = []interface{}{ + "authorization_code", + "client_credentials", + "password", + "refresh_token", + "urn:ietf:params:oauth:grant-type:token-exchange", +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Oauth2AuthenticationPropertiesGrant) UnmarshalJSON(b []byte) error { + var v string + if err := json.Unmarshal(b, &v); err != nil { + return err + } + var ok bool + for _, expected := range enumValues_Oauth2AuthenticationPropertiesGrant { + if reflect.DeepEqual(v, expected) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_Oauth2AuthenticationPropertiesGrant, v) + } + *j = Oauth2AuthenticationPropertiesGrant(v) + return nil +} + +// The configuration of an OAuth2 token request +type Oauth2AuthenticationPropertiesRequest struct { + // Encoding corresponds to the JSON schema field "encoding". + Encoding Oauth2AuthenticationPropertiesRequestEncoding `json:"encoding,omitempty"` +} + +type Oauth2AuthenticationPropertiesRequestEncoding string + +const Oauth2AuthenticationPropertiesRequestEncodingApplicationJson Oauth2AuthenticationPropertiesRequestEncoding = "application/json" +const Oauth2AuthenticationPropertiesRequestEncodingApplicationXWwwFormUrlencoded Oauth2AuthenticationPropertiesRequestEncoding = "application/x-www-form-urlencoded" + +var enumValues_Oauth2AuthenticationPropertiesRequestEncoding = []interface{}{ + "application/x-www-form-urlencoded", + "application/json", +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Oauth2AuthenticationPropertiesRequestEncoding) UnmarshalJSON(b []byte) error { + var v string + if err := json.Unmarshal(b, &v); err != nil { + return err + } + var ok bool + for _, expected := range enumValues_Oauth2AuthenticationPropertiesRequestEncoding { + if reflect.DeepEqual(v, expected) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_Oauth2AuthenticationPropertiesRequestEncoding, v) + } + *j = Oauth2AuthenticationPropertiesRequestEncoding(v) + return nil +} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *Oauth2AuthenticationPropertiesRequest) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + type Plain Oauth2AuthenticationPropertiesRequest + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + if v, ok := raw["encoding"]; !ok || v == nil { + plain.Encoding = "application/x-www-form-urlencoded" + } + *j = Oauth2AuthenticationPropertiesRequest(plain) + return nil +} + +// Represents an OAuth2 token. +type Oauth2Token struct { + // The security token to use. + Token string `json:"token"` + + // The type of the security token to use. + Type string `json:"type"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Oauth2Token) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["token"]; raw != nil && !ok { + return fmt.Errorf("field token in Oauth2Token: required") + } + if _, ok := raw["type"]; raw != nil && !ok { + return fmt.Errorf("field type in Oauth2Token: required") + } + type Plain Oauth2Token + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Oauth2Token(plain) + return nil +} + +// Configures the output of a workflow or task. +type Output struct { + // A runtime expression, if any, used to mutate and/or filter the output of the + // workflow or task. + As interface{} `json:"as,omitempty"` + + // The schema used to describe and validate the output of the workflow or task. + Schema *Schema `json:"schema,omitempty"` +} + +// Represents a referenceable authentication policy. +type ReferenceableAuthenticationPolicy map[string]interface{} + +// Defines a retry policy. +type RetryPolicy struct { + // The retry duration backoff. + Backoff RetryPolicyBackoff `json:"backoff,omitempty"` + + // The duration to wait between retry attempts. + Delay RetryPolicyDelay `json:"delay,omitempty"` + + // A runtime expression used to determine whether or not to retry running the + // task, in a given context. + ExceptWhen *string `json:"exceptWhen,omitempty"` + + // The parameters, if any, that control the randomness or variability of the delay + // between retry attempts. + Jitter *RetryPolicyJitter `json:"jitter,omitempty"` + + // The retry limit, if any. + Limit *RetryPolicyLimit `json:"limit,omitempty"` + + // A runtime expression, if any, used to determine whether or not to retry running + // the task, in a given context. + When *string `json:"when,omitempty"` +} + +// The retry duration backoff. +type RetryPolicyBackoff map[string]interface{} + +// The duration to wait between retry attempts. +type RetryPolicyDelay interface{} + +// The parameters, if any, that control the randomness or variability of the delay +// between retry attempts. +type RetryPolicyJitter struct { + // The minimum duration of the jitter range. + From RetryPolicyJitterFrom `json:"from"` + + // The maximum duration of the jitter range. + To RetryPolicyJitterTo `json:"to"` +} + +// The minimum duration of the jitter range. +type RetryPolicyJitterFrom interface{} + +// The maximum duration of the jitter range. +type RetryPolicyJitterTo interface{} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *RetryPolicyJitter) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["from"]; raw != nil && !ok { + return fmt.Errorf("field from in RetryPolicyJitter: required") + } + if _, ok := raw["to"]; raw != nil && !ok { + return fmt.Errorf("field to in RetryPolicyJitter: required") + } + type Plain RetryPolicyJitter + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = RetryPolicyJitter(plain) + return nil +} + +// The retry limit, if any. +type RetryPolicyLimit struct { + // Attempt corresponds to the JSON schema field "attempt". + Attempt *RetryPolicyLimitAttempt `json:"attempt,omitempty"` + + // The duration limit, if any, for all retry attempts. + Duration RetryPolicyLimitDuration `json:"duration,omitempty"` +} + +type RetryPolicyLimitAttempt struct { + // The maximum amount of retry attempts, if any. + Count *int `json:"count,omitempty"` + + // The maximum duration for each retry attempt. + Duration RetryPolicyLimitAttemptDuration `json:"duration,omitempty"` +} + +// The maximum duration for each retry attempt. +type RetryPolicyLimitAttemptDuration interface{} + +// The duration limit, if any, for all retry attempts. +type RetryPolicyLimitDuration interface{} + +// A runtime expression. +type RuntimeExpression string + +// Represents the definition of a schema. +type Schema struct { + // The schema's format. Defaults to 'json'. The (optional) version of the format + // can be set using `{format}:{version}`. + Format string `json:"format,omitempty"` +} + +// Serverless Workflow DSL - Workflow Schema. +type SchemaJson struct { + // Defines the task(s) the workflow must perform. + Do TaskList `json:"do"` + + // Documents the workflow. + Document SchemaJsonDocument `json:"document"` + + // Configures the workflow's input. + Input *Input `json:"input,omitempty"` + + // Configures the workflow's output. + Output *Output `json:"output,omitempty"` + + // Schedules the workflow. + Schedule *SchemaJsonSchedule `json:"schedule,omitempty"` + + // Timeout corresponds to the JSON schema field "timeout". + Timeout interface{} `json:"timeout,omitempty"` + + // Defines the workflow's reusable components. + Use *SchemaJsonUse `json:"use,omitempty"` +} + +// Documents the workflow. +type SchemaJsonDocument struct { + // The version of the DSL used by the workflow. + Dsl string `json:"dsl"` + + // Holds additional information about the workflow. + Metadata SchemaJsonDocumentMetadata `json:"metadata,omitempty"` + + // The workflow's name. + Name string `json:"name"` + + // The workflow's namespace. + Namespace string `json:"namespace"` + + // The workflow's Markdown summary. + Summary *string `json:"summary,omitempty"` + + // A key/value mapping of the workflow's tags, if any. + Tags SchemaJsonDocumentTags `json:"tags,omitempty"` + + // The workflow's title. + Title *string `json:"title,omitempty"` + + // The workflow's semantic version. + Version string `json:"version"` +} + +// Holds additional information about the workflow. +type SchemaJsonDocumentMetadata map[string]interface{} + +// A key/value mapping of the workflow's tags, if any. +type SchemaJsonDocumentTags map[string]interface{} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *SchemaJsonDocument) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["dsl"]; raw != nil && !ok { + return fmt.Errorf("field dsl in SchemaJsonDocument: required") + } + if _, ok := raw["name"]; raw != nil && !ok { + return fmt.Errorf("field name in SchemaJsonDocument: required") + } + if _, ok := raw["namespace"]; raw != nil && !ok { + return fmt.Errorf("field namespace in SchemaJsonDocument: required") + } + if _, ok := raw["version"]; raw != nil && !ok { + return fmt.Errorf("field version in SchemaJsonDocument: required") + } + type Plain SchemaJsonDocument + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = SchemaJsonDocument(plain) + return nil +} + +// Schedules the workflow. +type SchemaJsonSchedule struct { + // Specifies a delay duration that the workflow must wait before starting again + // after it completes. + After SchemaJsonScheduleAfter `json:"after,omitempty"` + + // Specifies the schedule using a cron expression, e.g., '0 0 * * *' for daily at + // midnight. + Cron *string `json:"cron,omitempty"` + + // Specifies the duration of the interval at which the workflow should be + // executed. + Every SchemaJsonScheduleEvery `json:"every,omitempty"` + + // Specifies the events that trigger the workflow execution. + On EventConsumptionStrategy `json:"on,omitempty"` +} + +// Specifies a delay duration that the workflow must wait before starting again +// after it completes. +type SchemaJsonScheduleAfter interface{} + +// Specifies the duration of the interval at which the workflow should be executed. +type SchemaJsonScheduleEvery interface{} + +// Defines the workflow's reusable components. +type SchemaJsonUse struct { + // The workflow's reusable authentication policies. + Authentications SchemaJsonUseAuthentications `json:"authentications,omitempty"` + + // The workflow's reusable errors. + Errors SchemaJsonUseErrors `json:"errors,omitempty"` + + // The workflow's extensions. + Extensions []SchemaJsonUseExtensionsElem `json:"extensions,omitempty"` + + // The workflow's reusable functions. + Functions SchemaJsonUseFunctions `json:"functions,omitempty"` + + // The workflow's reusable retry policies. + Retries SchemaJsonUseRetries `json:"retries,omitempty"` + + // The workflow's reusable secrets. + Secrets []string `json:"secrets,omitempty"` + + // The workflow's reusable timeouts. + Timeouts SchemaJsonUseTimeouts `json:"timeouts,omitempty"` +} + +// The workflow's reusable authentication policies. +type SchemaJsonUseAuthentications map[string]AuthenticationPolicy + +// The workflow's reusable errors. +type SchemaJsonUseErrors map[string]Error + +type SchemaJsonUseExtensionsElem map[string]Extension + +// The workflow's reusable functions. +type SchemaJsonUseFunctions map[string]interface{} + +// The workflow's reusable retry policies. +type SchemaJsonUseRetries map[string]RetryPolicy + +// The workflow's reusable timeouts. +type SchemaJsonUseTimeouts map[string]Timeout + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *SchemaJson) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["do"]; raw != nil && !ok { + return fmt.Errorf("field do in SchemaJson: required") + } + if _, ok := raw["document"]; raw != nil && !ok { + return fmt.Errorf("field document in SchemaJson: required") + } + type Plain SchemaJson + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = SchemaJson(plain) + return nil +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Schema) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + type Plain Schema + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + if v, ok := raw["format"]; !ok || v == nil { + plain.Format = "json" + } + *j = Schema(plain) + return nil +} + +// Represents an authentication policy based on secrets. +type SecretBasedAuthenticationPolicy struct { + // The name of the authentication policy to use. + Use string `json:"use"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *SecretBasedAuthenticationPolicy) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["use"]; raw != nil && !ok { + return fmt.Errorf("field use in SecretBasedAuthenticationPolicy: required") + } + type Plain SecretBasedAuthenticationPolicy + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + if len(plain.Use) < 1 { + return fmt.Errorf("field %s length: must be >= %d", "use", 1) + } + *j = SecretBasedAuthenticationPolicy(plain) + return nil +} + +// A discrete unit of work that contributes to achieving the overall objectives +// defined by the workflow. +type Task interface{} + +// An object inherited by all tasks. +type TaskBase struct { + // Export task output to context. + Export *Export `json:"export,omitempty"` + + // A runtime expression, if any, used to determine whether or not the task should + // be run. + If *string `json:"if,omitempty"` + + // Configure the task's input. + Input *Input `json:"input,omitempty"` + + // Holds additional information about the task. + Metadata TaskBaseMetadata `json:"metadata,omitempty"` + + // Configure the task's output. + Output *Output `json:"output,omitempty"` + + // The flow directive to be performed upon completion of the task. + Then TaskBaseThen `json:"then,omitempty"` + + // Timeout corresponds to the JSON schema field "timeout". + Timeout interface{} `json:"timeout,omitempty"` +} + +// Holds additional information about the task. +type TaskBaseMetadata map[string]interface{} + +// The flow directive to be performed upon completion of the task. +type TaskBaseThen interface{} + +// List of named tasks to perform. +type TaskList []map[string]interface{} + +// The definition of a timeout. +type Timeout struct { + // The duration after which to timeout. + After TimeoutAfter `json:"after"` +} + +// The duration after which to timeout. +type TimeoutAfter interface{} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *Timeout) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["after"]; raw != nil && !ok { + return fmt.Errorf("field after in Timeout: required") + } + type Plain Timeout + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Timeout(plain) + return nil +}
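
Reviewer note (not part of the patch): the generated types above combine plain encoding/json decoding with required-field checks and defaults. The sketch below shows how a v1.0.0-alpha2 workflow document could be decoded into SchemaJson. It assumes the snippet compiles alongside the generated spec package (e.g. in a spec_test file); the workflow content it decodes is made up for the example.

// Illustrative sketch: decode a workflow document with the generated types.
// The custom UnmarshalJSON methods reject input missing "document"/"do" or
// the required document metadata fields (dsl, namespace, name, version).
package spec

import (
	"encoding/json"
	"fmt"
)

func ExampleSchemaJson() {
	raw := []byte(`{
	  "document": {
	    "dsl": "1.0.0-alpha2",
	    "namespace": "default",
	    "name": "greeter",
	    "version": "0.1.0"
	  },
	  "do": [
	    { "wait10s": { "wait": { "seconds": 10 } } }
	  ]
	}`)

	var wf SchemaJson
	if err := json.Unmarshal(raw, &wf); err != nil {
		fmt.Println("invalid workflow:", err)
		return
	}

	// Do is a TaskList ([]map[string]interface{}), one entry per named task.
	fmt.Println(wf.Document.Name, wf.Document.Version, len(wf.Do))
	// Output: greeter 0.1.0 1
}

Defaults behave the same way: unmarshalling an empty Schema yields Format "json", and an empty Oauth2AuthenticationPropertiesRequest yields the "application/x-www-form-urlencoded" encoding, so decoded values can be used without re-applying schema defaults by hand.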