From cf5fe74db6992d9f51f5073bbcf20c8c346357a1 Mon Sep 17 00:00:00 2001
From: Guilherme Caponetto <638737+caponetto@users.noreply.github.com>
Date: Wed, 17 Jan 2024 12:32:09 -0300
Subject: [PATCH] feat(orchestrator): add orchestrator plugin (#783)

* feat(orchestrator): add orchestrator plugins

  Squashed and rebased. Credits to
  - Guilherme Caponetto
  - Tiago Dolphine
  - Michael Anstis
  who did the work before squash and rebase.

* feat(orchestrator): enable dynamic plugin

* fix(orchestrator): load HostDirectory from backend-app-api (#28)

  Signed-off-by: Moti Asayag
  (cherry picked from commit 3ba02c6e5b35966d93fe18d7bf2e7b40ca00b850)

* fix(orchestrator): make the port config optional (#38)

* feat(orchestrator): handle assessment workflow and make optional the workflowsSource config (#35)

* feat: issue FLPATH-591 - Add assessment workflow type and display outputs (#21)
* Configure for assessment swf
* filter assessment on workflow definition page
* workflow columns
* workflow columns
* workflow columns
* workflow filter
* feat: Render assessment results supporting dynamic categories
* issue FLPATH-657: workflow label
* Revert "Configure for assessment swf"

  This reverts commit b4048e11939bf86dd881536513d8ea4764999d0b.

* Revert sonata service port
* Add key for workflow options category
* Fix workflow execution from choose btn
* Fix review comments
* address comments
* Fix review comment to switch to useRouteRef
* fix review comments
* fix review comments

---------

Co-authored-by: richardwang98

* feat: make optional the workflowsSource config (#25)

---------

Co-authored-by: anludke

* feat(orchestrator): add backend endpoint for getting workflows from data index service (#39)
* feat: add backend for getting workflows from data index service
* feat: cherrypick - add backend for getting workflows from data index service
* feat: cherrypick - add backend for getting workflows from data index service
* feat: cherrypick - add backend for getting workflows from data index service

* chore(orchestrator): update dependencies

* feat(orchestrator): orchestrator plugin entry page & workflows table (#31)

  https://issues.redhat.com/browse/FLPATH-686
  https://issues.redhat.com/browse/FLPATH-682

* feat: introduced the workflows/overview endpoint (#36)
* FLPATH-702: New endpoint to fetch workflow overview
* FLPATH-702: iterate through all elements instead of picking the first element
* modified the map method
* calculate avg execution time change
* Add log message when http calls fail
* Fix lint issues
* rename variable
* paginated graphql object fetch
* Include workflowId in the result object
* Renamed to description
* to epoch timestamp
* suppress eslint warning
* epoch number to string
* make all string fields optional
* use a simple for loop
* extracted to a new method
* rename to lastTriggeredMs
* include uri in the WorkflowOverview type
* mark avgDurationMs as optional

* feat(orchestrator): introduced `workflows/:workflowId/overview` endpoint (#40)
* FLPATH-702: New endpoint to fetch workflow overview
* FLPATH-702: iterate through all elements instead of picking the first element
* modified the map method
* calculate avg execution time change
* Add log message when http calls fail
* Fix lint issues
* rename variable
* paginated graphql object fetch
* Include workflowId in the result object
* Renamed to description
* to epoch timestamp
* suppress eslint warning
* epoch number to string
* make all string fields optional
* use a simple for loop
* extracted to a new method
* rename to lastTriggeredMs
* include uri in the WorkflowOverview type
* mark avgDurationMs as optional
* fetch only one overview obj

* fix(orchestrator): make the port config optional (#37)

* feat(orchestrator): orchestrator plugin entry page & workflows table (#31)

  https://issues.redhat.com/browse/FLPATH-686
  https://issues.redhat.com/browse/FLPATH-682

* removed unnecessary method

---------

Co-authored-by: Guilherme Caponetto <638737+caponetto@users.noreply.github.com>
Co-authored-by: Jonathan Kilzi

* feat(orchestrator): add Workflow Run List (#30)
* feat(orchestrator): add Workflow Run List

  FLPATH-693
  A component listing running workflows is added.

* chore: move loading logic out of the OrchestratorPage

* chore(orchestrator): implement the WorkflowViewerFormatter (#41)
* chore: implement the WorkflowViewerFormatter: utility for converting WorkflowOverview backend data to data the UI can display
* removed redundant export and removed redundant 'Interface' suffix from DataFormatter interface name
* Update plugins/orchestrator/src/dataFormatters/DataFormatter.ts

  Co-authored-by: Guilherme Caponetto <638737+caponetto@users.noreply.github.com>

---------

Co-authored-by: Guilherme Caponetto <638737+caponetto@users.noreply.github.com>

* feat(orchestrator): fetch data input schema from `/management/processes` (#45)
* Fetch data input schema from /management/processes
* Fix some code smells
* Rename WorkflowProcess -> WorkflowInfo

* feat(orchestrator): implement the workflow viewer new UX (#32)
* feat(orchestrator): implement the workflow viewer new UX
* code review fixes
* visual fixes including skeleton for loading state

* chore(orchestrator): Renames the components displaying the tabs content

* chore(orchestrator): updates the OrchestratorClient

  Lazy loads the baseUrl
  Adds a method for calling GET /workflows/overview

* feat(orchestrator): orchestrator workflow execution page (#46)

  Co-authored-by: Jonathan Kilzi

* chore(orchestrator): updates the stories grouping (#50)
* chore(orchestrator): aligns the workflows table with the design (#51)
* chore(orchestrator): add Category to the processInstance result (#52)
* fix(orchestrator): add links to workflows in the workflow list (#53)
* feat: added color icon to workflow details page last run status field (#55)
* feat(orchestrator): enable usage of local envelope for the workflow editor (#56)
* feat(orchestrator): add business key to assessment and pass on to workflow options (#42)
* feat(orchestrator): add page listing details of a workflow run (#49)
* feat(orchestrator): add feature flag for developer mode and config to enable/disable the integration with catalog (#58)
* feat(orchestrator): workflow editor modal (#59)
* feat(orchestrator): fix missing workflow type in overview (#60)

  Signed-off-by: Gloria Ciavarrini

* fix(orchestrator): fix some code smells (#63)
* fix(orchestrator): addresses sonarcloud issues (#65)
* fix(orchestrator): addressed a couple of issues (#64)
* fix(orchestrator): minor fixes (#67)
* feat(orchestrator): refactoring to use data-index to fetch workflow definitions (#57)

  Co-authored-by: Guilherme Caponetto <638737+caponetto@users.noreply.github.com>

* fix(orchestrator): fix sonar issue (#69)
* feat(orchestrator): use assessment process id as bk for next workflows (#70)
* feat(orchestrator): execute workflow page new UX (#68)
* fix(orchestrator): theme for `monaco-editor` in `JsonTextAreaForm` component and mandatory `dataIndexService.url` (#71)
* fix(orchestrator): remove dependency on devmode in the entity provider (#72)
* fix(orchestrator): skip it if hardcoded
specs do not exist (#74) * feat(orchestrator): execute workflow page polishing (#75) * feat(orchestrator): workflow instance result page (#73) * chore(orchestrator): add codeowners to orchestrator * fix(FLPATH-852): show WF description on the execution result page (#77) * chore(orchestrator): add OWNERS file to each orchestrator package * feat(orchestrator): migrates to the new UI (#78) --------- Signed-off-by: Gloria Ciavarrini Co-authored-by: Moti Asayag Co-authored-by: richard wang <58698556+RichardW98@users.noreply.github.com> Co-authored-by: anludke Co-authored-by: rhkp <87712456+rhkp@users.noreply.github.com> Co-authored-by: Jonathan Kilzi Co-authored-by: Jude Niroshan Co-authored-by: Marek Libra Co-authored-by: Bat-Zion Rotman Co-authored-by: yu zhao Co-authored-by: Gloria Ciavarrini Co-authored-by: Tiago Dolphine --- .github/CODEOWNERS | 4 +- .gitignore | 8 +- package.json | 7 +- packages/backend/package.json | 2 + packages/backend/src/index.ts | 10 + packages/backend/src/types.ts | 4 + plugins/orchestrator-backend/.eslintrc.js | 1 + plugins/orchestrator-backend/CHANGELOG.md | 0 plugins/orchestrator-backend/OWNERS | 6 + plugins/orchestrator-backend/README.md | 5 + .../app-config.janus-idp.yaml | 4 + plugins/orchestrator-backend/dev/index.ts | 51 + .../dist-dynamic/package.json | 94 + .../dist-dynamic/yarn.lock | 1075 +++++++ plugins/orchestrator-backend/package.json | 95 + .../src/OrchestratorPlugin.ts | 44 + plugins/orchestrator-backend/src/alpha.ts | 1 + .../orchestrator-backend/src/dynamic/alpha.ts | 9 + .../orchestrator-backend/src/dynamic/index.ts | 35 + .../src/helpers/errorBuilder.ts | 34 + plugins/orchestrator-backend/src/index.ts | 4 + .../OrchestratorModuleEntityProvider.ts | 44 + .../orchestrator-backend/src/module/index.ts | 1 + .../provider/OrchestratorEntityProvider.ts | 172 + .../src/provider/index.ts | 1 + .../src/routerWrapper/index.ts | 57 + plugins/orchestrator-backend/src/run.ts | 42 + .../src/service/CloudEventService.ts | 36 + .../src/service/DataIndexService.ts | 225 ++ .../src/service/DataInputSchemaService.ts | 1236 +++++++ .../src/service/GitService.ts | 93 + .../src/service/Helper.ts | 60 + .../src/service/JiraService.ts | 78 + .../src/service/OpenApiService.ts | 134 + .../src/service/ScaffolderService.ts | 103 + .../src/service/SonataFlowService.ts | 456 +++ .../src/service/WorkflowService.ts | 224 ++ .../src/service/openapi-template.json | 79 + .../src/service/router.ts | 430 +++ .../src/types/apiResponse.ts | 31 + plugins/orchestrator-backend/tsconfig.json | 9 + plugins/orchestrator-backend/turbo.json | 9 + plugins/orchestrator-common/.eslintrc.js | 1 + plugins/orchestrator-common/CHANGELOG.md | 0 plugins/orchestrator-common/OWNERS | 6 + plugins/orchestrator-common/README.md | 5 + plugins/orchestrator-common/config.d.ts | 116 + plugins/orchestrator-common/package.json | 48 + plugins/orchestrator-common/src/constants.ts | 77 + plugins/orchestrator-common/src/index.ts | 4 + plugins/orchestrator-common/src/models.ts | 110 + plugins/orchestrator-common/src/types.ts | 103 + plugins/orchestrator-common/src/workflow.ts | 79 + plugins/orchestrator-common/tsconfig.json | 9 + plugins/orchestrator-common/turbo.json | 9 + plugins/orchestrator/.eslintrc.js | 1 + plugins/orchestrator/CHANGELOG.md | 0 plugins/orchestrator/OWNERS | 6 + plugins/orchestrator/README.md | 252 ++ .../orchestrator/app-config.janus-idp.yaml | 14 + plugins/orchestrator/dev/index.tsx | 14 + plugins/orchestrator/package.json | 118 + .../src/__fixtures__/fakeFeatureFlagsApi.ts | 22 + 
.../src/__fixtures__/fakeNodeInstances.ts | 112 + .../src/__fixtures__/fakeProcessInstance.ts | 105 + .../src/__fixtures__/fakeSpecs.ts | 2842 +++++++++++++++++ ...keWorkflowDataInputSchemaDifferentTypes.ts | 169 + .../fakeWorkflowDataInputSchemaResponse.ts | 94 + ...orkflowDataInputSchemaResponseMultiStep.ts | 556 ++++ .../src/__fixtures__/fakeWorkflowItem.ts | 247 ++ .../src/__fixtures__/fakeWorkflowOverview.ts | 13 + .../__fixtures__/fakeWorkflowOverviewList.ts | 100 + .../src/__fixtures__/fakeWorkflowSpecs.ts | 2842 +++++++++++++++++ .../src/__fixtures__/veryLongString.ts | 61 + .../src/api/MockOrchestratorClient.ts | 193 ++ .../src/api/OrchestratorClient.ts | 178 ++ plugins/orchestrator/src/api/api.ts | 54 + plugins/orchestrator/src/api/index.ts | 3 + .../src/components/BaseOrchestratorPage.tsx | 32 + .../src/components/CreateWorkflowPage.tsx | 144 + .../src/components/EditWorkflowDialog.tsx | 100 + .../ExecuteWorkflowPage.stories.tsx | 103 + .../ExecuteWorkflowPage.tsx | 133 + .../ExecuteWorkflowPage/JsonTextAreaForm.tsx | 70 + .../ExecuteWorkflowPage/StepperForm.tsx | 190 ++ .../src/components/NewWorkflowViewerPage.tsx | 193 ++ .../components/OrchestratorPage.stories.tsx | 96 + .../src/components/OrchestratorPage.tsx | 26 + .../orchestrator/src/components/Paragraph.tsx | 18 + .../orchestrator/src/components/Router.tsx | 70 + .../OrchestratorScaffolderTemplateCard.tsx | 37 + .../ScaffolderTemplateCard/index.ts | 1 + .../orchestrator/src/components/Selector.tsx | 68 + .../src/components/SubmitButton.tsx | 37 + .../WorkflowDefinitionDetailsCard.tsx | 104 + .../WorkflowDefinitionViewerPage.stories.tsx | 121 + .../WorkflowDefinitionViewerPage.tsx | 135 + .../WorkflowDefinitionViewerPage/index.ts | 1 + .../src/components/WorkflowDialog.tsx | 75 + .../WorkflowEditor/WorkflowEditor.tsx | 474 +++ .../channel/WorkflowEditorLanguageService.ts | 72 + ...flowEditorLanguageServiceChannelApiImpl.ts | 35 + .../src/components/WorkflowEditor/index.ts | 2 + .../WorkflowInstancePage.stories.tsx | 116 + .../src/components/WorkflowInstancePage.tsx | 91 + .../WorkflowInstancePageContent.tsx | 239 ++ .../WorkflowInstanceStatusIndicator.tsx | 23 + .../components/WorkflowProgress.stories.tsx | 35 + .../src/components/WorkflowProgress.tsx | 37 + .../src/components/WorkflowProgressNode.tsx | 90 + .../components/WorkflowProgressNodeModel.ts | 49 + .../src/components/WorkflowRunDetail.ts | 19 + .../WorkflowRunsTabContent.stories.tsx | 57 + .../src/components/WorkflowRunsTabContent.tsx | 136 + .../components/WorkflowVariablesViewer.tsx | 42 + .../src/components/WorkflowsTabContent.tsx | 93 + .../src/components/WorkflowsTable.stories.tsx | 58 + .../src/components/WorkflowsTable.tsx | 186 ++ plugins/orchestrator/src/constants.ts | 1 + .../src/dataFormatters/DataFormatter.ts | 5 + .../WorkflowOverviewFormatter.test.ts | 53 + .../WorkflowOverviewFormatter.ts | 69 + .../hooks/useWorkflowInstanceStatusColors.ts | 34 + plugins/orchestrator/src/index.ts | 14 + plugins/orchestrator/src/plugin.ts | 36 + plugins/orchestrator/src/routes.ts | 53 + .../src/utils/NodeInstanceUtils.test.ts | 198 ++ .../src/utils/NodeInstanceUtils.ts | 38 + plugins/orchestrator/src/utils/StringUtils.ts | 8 + plugins/orchestrator/src/utils/TypeGuards.ts | 9 + plugins/orchestrator/src/utils/errorUtils.ts | 11 + plugins/orchestrator/tsconfig.json | 10 + plugins/orchestrator/turbo.json | 9 + ...erlessWorkflowCombinedEditorEnvelopeApp.ts | 31 + ...verlessWorkflowDiagramEditorEnvelopeApp.ts | 33 + ...ServerlessWorkflowTextEditorEnvelopeApp.ts | 29 + 
...ess-workflow-combined-editor-envelope.html | 27 + ...less-workflow-diagram-editor-envelope.html | 26 + ...verless-workflow-text-editor-envelope.html | 26 + .../webpack.config.js | 173 + yarn.lock | 2776 +++++++++++++++- 141 files changed, 20789 insertions(+), 64 deletions(-) create mode 100644 plugins/orchestrator-backend/.eslintrc.js create mode 100644 plugins/orchestrator-backend/CHANGELOG.md create mode 100644 plugins/orchestrator-backend/OWNERS create mode 100644 plugins/orchestrator-backend/README.md create mode 100644 plugins/orchestrator-backend/app-config.janus-idp.yaml create mode 100644 plugins/orchestrator-backend/dev/index.ts create mode 100644 plugins/orchestrator-backend/dist-dynamic/package.json create mode 100644 plugins/orchestrator-backend/dist-dynamic/yarn.lock create mode 100644 plugins/orchestrator-backend/package.json create mode 100644 plugins/orchestrator-backend/src/OrchestratorPlugin.ts create mode 100644 plugins/orchestrator-backend/src/alpha.ts create mode 100644 plugins/orchestrator-backend/src/dynamic/alpha.ts create mode 100644 plugins/orchestrator-backend/src/dynamic/index.ts create mode 100644 plugins/orchestrator-backend/src/helpers/errorBuilder.ts create mode 100644 plugins/orchestrator-backend/src/index.ts create mode 100644 plugins/orchestrator-backend/src/module/OrchestratorModuleEntityProvider.ts create mode 100644 plugins/orchestrator-backend/src/module/index.ts create mode 100644 plugins/orchestrator-backend/src/provider/OrchestratorEntityProvider.ts create mode 100644 plugins/orchestrator-backend/src/provider/index.ts create mode 100644 plugins/orchestrator-backend/src/routerWrapper/index.ts create mode 100644 plugins/orchestrator-backend/src/run.ts create mode 100644 plugins/orchestrator-backend/src/service/CloudEventService.ts create mode 100644 plugins/orchestrator-backend/src/service/DataIndexService.ts create mode 100644 plugins/orchestrator-backend/src/service/DataInputSchemaService.ts create mode 100644 plugins/orchestrator-backend/src/service/GitService.ts create mode 100644 plugins/orchestrator-backend/src/service/Helper.ts create mode 100644 plugins/orchestrator-backend/src/service/JiraService.ts create mode 100644 plugins/orchestrator-backend/src/service/OpenApiService.ts create mode 100644 plugins/orchestrator-backend/src/service/ScaffolderService.ts create mode 100644 plugins/orchestrator-backend/src/service/SonataFlowService.ts create mode 100644 plugins/orchestrator-backend/src/service/WorkflowService.ts create mode 100644 plugins/orchestrator-backend/src/service/openapi-template.json create mode 100644 plugins/orchestrator-backend/src/service/router.ts create mode 100644 plugins/orchestrator-backend/src/types/apiResponse.ts create mode 100644 plugins/orchestrator-backend/tsconfig.json create mode 100644 plugins/orchestrator-backend/turbo.json create mode 100644 plugins/orchestrator-common/.eslintrc.js create mode 100644 plugins/orchestrator-common/CHANGELOG.md create mode 100644 plugins/orchestrator-common/OWNERS create mode 100644 plugins/orchestrator-common/README.md create mode 100644 plugins/orchestrator-common/config.d.ts create mode 100644 plugins/orchestrator-common/package.json create mode 100644 plugins/orchestrator-common/src/constants.ts create mode 100644 plugins/orchestrator-common/src/index.ts create mode 100644 plugins/orchestrator-common/src/models.ts create mode 100644 plugins/orchestrator-common/src/types.ts create mode 100644 plugins/orchestrator-common/src/workflow.ts create mode 100644 
plugins/orchestrator-common/tsconfig.json create mode 100644 plugins/orchestrator-common/turbo.json create mode 100644 plugins/orchestrator/.eslintrc.js create mode 100644 plugins/orchestrator/CHANGELOG.md create mode 100644 plugins/orchestrator/OWNERS create mode 100644 plugins/orchestrator/README.md create mode 100644 plugins/orchestrator/app-config.janus-idp.yaml create mode 100644 plugins/orchestrator/dev/index.tsx create mode 100644 plugins/orchestrator/package.json create mode 100644 plugins/orchestrator/src/__fixtures__/fakeFeatureFlagsApi.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeNodeInstances.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeProcessInstance.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeSpecs.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaDifferentTypes.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponse.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponseMultiStep.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowItem.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowOverview.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowOverviewList.ts create mode 100644 plugins/orchestrator/src/__fixtures__/fakeWorkflowSpecs.ts create mode 100644 plugins/orchestrator/src/__fixtures__/veryLongString.ts create mode 100644 plugins/orchestrator/src/api/MockOrchestratorClient.ts create mode 100644 plugins/orchestrator/src/api/OrchestratorClient.ts create mode 100644 plugins/orchestrator/src/api/api.ts create mode 100644 plugins/orchestrator/src/api/index.ts create mode 100644 plugins/orchestrator/src/components/BaseOrchestratorPage.tsx create mode 100644 plugins/orchestrator/src/components/CreateWorkflowPage.tsx create mode 100644 plugins/orchestrator/src/components/EditWorkflowDialog.tsx create mode 100644 plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.stories.tsx create mode 100644 plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.tsx create mode 100644 plugins/orchestrator/src/components/ExecuteWorkflowPage/JsonTextAreaForm.tsx create mode 100644 plugins/orchestrator/src/components/ExecuteWorkflowPage/StepperForm.tsx create mode 100644 plugins/orchestrator/src/components/NewWorkflowViewerPage.tsx create mode 100644 plugins/orchestrator/src/components/OrchestratorPage.stories.tsx create mode 100644 plugins/orchestrator/src/components/OrchestratorPage.tsx create mode 100644 plugins/orchestrator/src/components/Paragraph.tsx create mode 100644 plugins/orchestrator/src/components/Router.tsx create mode 100644 plugins/orchestrator/src/components/ScaffolderTemplateCard/OrchestratorScaffolderTemplateCard.tsx create mode 100644 plugins/orchestrator/src/components/ScaffolderTemplateCard/index.ts create mode 100644 plugins/orchestrator/src/components/Selector.tsx create mode 100644 plugins/orchestrator/src/components/SubmitButton.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowDefinitionViewerPage/WorkflowDefinitionDetailsCard.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowDefinitionViewerPage/WorkflowDefinitionViewerPage.stories.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowDefinitionViewerPage/WorkflowDefinitionViewerPage.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowDefinitionViewerPage/index.ts create mode 
100644 plugins/orchestrator/src/components/WorkflowDialog.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowEditor/WorkflowEditor.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowEditor/channel/WorkflowEditorLanguageService.ts create mode 100644 plugins/orchestrator/src/components/WorkflowEditor/channel/WorkflowEditorLanguageServiceChannelApiImpl.ts create mode 100644 plugins/orchestrator/src/components/WorkflowEditor/index.ts create mode 100644 plugins/orchestrator/src/components/WorkflowInstancePage.stories.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowInstancePage.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowInstancePageContent.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowInstanceStatusIndicator.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowProgress.stories.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowProgress.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowProgressNode.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowProgressNodeModel.ts create mode 100644 plugins/orchestrator/src/components/WorkflowRunDetail.ts create mode 100644 plugins/orchestrator/src/components/WorkflowRunsTabContent.stories.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowRunsTabContent.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowVariablesViewer.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowsTabContent.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowsTable.stories.tsx create mode 100644 plugins/orchestrator/src/components/WorkflowsTable.tsx create mode 100644 plugins/orchestrator/src/constants.ts create mode 100644 plugins/orchestrator/src/dataFormatters/DataFormatter.ts create mode 100644 plugins/orchestrator/src/dataFormatters/WorkflowOverviewFormatter.test.ts create mode 100644 plugins/orchestrator/src/dataFormatters/WorkflowOverviewFormatter.ts create mode 100644 plugins/orchestrator/src/hooks/useWorkflowInstanceStatusColors.ts create mode 100644 plugins/orchestrator/src/index.ts create mode 100644 plugins/orchestrator/src/plugin.ts create mode 100644 plugins/orchestrator/src/routes.ts create mode 100644 plugins/orchestrator/src/utils/NodeInstanceUtils.test.ts create mode 100644 plugins/orchestrator/src/utils/NodeInstanceUtils.ts create mode 100644 plugins/orchestrator/src/utils/StringUtils.ts create mode 100644 plugins/orchestrator/src/utils/TypeGuards.ts create mode 100644 plugins/orchestrator/src/utils/errorUtils.ts create mode 100644 plugins/orchestrator/tsconfig.json create mode 100644 plugins/orchestrator/turbo.json create mode 100644 plugins/orchestrator/workflow-editor-envelope/ServerlessWorkflowCombinedEditorEnvelopeApp.ts create mode 100644 plugins/orchestrator/workflow-editor-envelope/ServerlessWorkflowDiagramEditorEnvelopeApp.ts create mode 100644 plugins/orchestrator/workflow-editor-envelope/ServerlessWorkflowTextEditorEnvelopeApp.ts create mode 100644 plugins/orchestrator/workflow-editor-envelope/serverless-workflow-combined-editor-envelope.html create mode 100644 plugins/orchestrator/workflow-editor-envelope/serverless-workflow-diagram-editor-envelope.html create mode 100644 plugins/orchestrator/workflow-editor-envelope/serverless-workflow-text-editor-envelope.html create mode 100644 plugins/orchestrator/workflow-editor-envelope/webpack.config.js diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ac4b43fb1d..cdbb59aa06 100644 --- 
a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -29,4 +29,6 @@ yarn.lock @janus-idp/maintainers-plugins /plugins/rbac-common/ @janus-idp/maintainers-plugins @gorkem @AndrienkoAleksandr @PatAKnight /plugins/notifications @janus-idp/maintainers-plugins @mareklibra /plugins/notifications-backend @janus-idp/maintainers-plugins @mareklibra @ydayagi - +/plugins/orchestrator @janus-idp/maintainers-plugins @caponetto @jkilzi +/plugins/orchestrator-backend @janus-idp/maintainers-plugins @caponetto @jkilzi +/plugins/orchestrator-common @janus-idp/maintainers-plugins @caponetto @jkilzi diff --git a/.gitignore b/.gitignore index 2b64e1c753..d11efe63ab 100644 --- a/.gitignore +++ b/.gitignore @@ -52,10 +52,16 @@ site # vscode database functionality support files *.session.sql +.vscode + # turbo .turbo +# idea +.idea +*.iml + # build cache .webpack-cache -.idea/ +.tmp diff --git a/package.json b/package.json index 0174e19094..7a2437ac6b 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,10 @@ "packages": [ "packages/*", "plugins/*" + ], + "nohoist": [ + "@janus-idp/backstage-plugin-orchestrator", + "@janus-idp/backstage-plugin-orchestrator/@kie-tools/**/!(react|react-dom)" ] }, "devDependencies": { @@ -54,7 +58,8 @@ }, "resolutions": { "@types/react": "^17.0.68", - "@types/react-dom": "^17.0.21" + "@types/react-dom": "^17.0.21", + "vscode-languageserver-types": "3.17.1" }, "lint-staged": { "*": "turbo run prettier:fix --", diff --git a/packages/backend/package.json b/packages/backend/package.json index 65628d5914..ba14c04647 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -27,6 +27,8 @@ "@backstage/plugin-auth-backend": "^0.19.3", "@backstage/plugin-auth-node": "^0.4.0", "@backstage/plugin-catalog-backend": "^1.14.0", + "@backstage/plugin-events-backend": "^0.2.8", + "@backstage/plugin-events-node": "^0.2.8", "@backstage/plugin-permission-backend": "^0.5.29", "@backstage/plugin-permission-common": "^0.7.9", "@backstage/plugin-permission-node": "^0.7.17", diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index a15ad6bc79..94a970af4e 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -11,6 +11,7 @@ import { createServiceBuilder, DatabaseManager, getRootLogger, + HostDiscovery, loadBackendConfig, notFoundHandler, ServerTokenManager, @@ -19,8 +20,10 @@ import { useHotMemoize, } from '@backstage/backend-common'; import { TaskScheduler } from '@backstage/backend-tasks'; +import { CatalogClient } from '@backstage/catalog-client'; import { Config } from '@backstage/config'; import { DefaultIdentityClient } from '@backstage/plugin-auth-node'; +import { DefaultEventBroker } from '@backstage/plugin-events-backend'; import { ServerPermissionClient } from '@backstage/plugin-permission-node'; import Router from 'express-promise-router'; @@ -43,6 +46,9 @@ function makeCreateEnv(config: Config) { const databaseManager = DatabaseManager.fromConfig(config, { logger: root }); const tokenManager = ServerTokenManager.fromConfig(config, { logger: root }); const taskScheduler = TaskScheduler.fromConfig(config); + const catalogApi = new CatalogClient({ + discoveryApi: HostDiscovery.fromConfig(config), + }); const identity = DefaultIdentityClient.create({ discovery, @@ -52,6 +58,8 @@ function makeCreateEnv(config: Config) { tokenManager, }); + const eventBroker = new DefaultEventBroker(root.child({ type: 'plugin' })); + root.info(`Created UrlReader ${reader}`); return (plugin: string): PluginEnvironment => { @@ -70,6 +78,8 @@ 
function makeCreateEnv(config: Config) { scheduler, permissions, identity, + eventBroker, + catalogApi, }; }; } diff --git a/packages/backend/src/types.ts b/packages/backend/src/types.ts index a2c43b45de..e73b093511 100644 --- a/packages/backend/src/types.ts +++ b/packages/backend/src/types.ts @@ -6,8 +6,10 @@ import { UrlReader, } from '@backstage/backend-common'; import { PluginTaskScheduler } from '@backstage/backend-tasks'; +import { CatalogApi } from '@backstage/catalog-client'; import { Config } from '@backstage/config'; import { IdentityApi } from '@backstage/plugin-auth-node'; +import { EventBroker } from '@backstage/plugin-events-node'; import { PermissionEvaluator } from '@backstage/plugin-permission-common'; import { Logger } from 'winston'; @@ -23,4 +25,6 @@ export type PluginEnvironment = { scheduler: PluginTaskScheduler; permissions: PermissionEvaluator; identity: IdentityApi; + eventBroker: EventBroker; + catalogApi: CatalogApi; }; diff --git a/plugins/orchestrator-backend/.eslintrc.js b/plugins/orchestrator-backend/.eslintrc.js new file mode 100644 index 0000000000..e2a53a6ad2 --- /dev/null +++ b/plugins/orchestrator-backend/.eslintrc.js @@ -0,0 +1 @@ +module.exports = require('@backstage/cli/config/eslint-factory')(__dirname); diff --git a/plugins/orchestrator-backend/CHANGELOG.md b/plugins/orchestrator-backend/CHANGELOG.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/orchestrator-backend/OWNERS b/plugins/orchestrator-backend/OWNERS new file mode 100644 index 0000000000..e09f7ec35c --- /dev/null +++ b/plugins/orchestrator-backend/OWNERS @@ -0,0 +1,6 @@ +approvers: + - caponetto + - jkilzi +reviewers: + - caponetto + - jkilzi diff --git a/plugins/orchestrator-backend/README.md b/plugins/orchestrator-backend/README.md new file mode 100644 index 0000000000..cf4bfb3439 --- /dev/null +++ b/plugins/orchestrator-backend/README.md @@ -0,0 +1,5 @@ +# Orchestrator Backend Plugin for Backstage + +Welcome to the backend package for the Orchestrator plugin! + +For more information about the Orchestrator plugin, see the [Orchestrator Plugin documentation](https://github.com/janus-idp/backstage-plugins/tree/main/plugins/orchestrator) on GitHub. 
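The packages/backend changes above thread two new fields, eventBroker and catalogApi, through PluginEnvironment so that a legacy-style backend can pass them to the orchestrator router. The sketch below is not part of this patch: it assumes the plugin is consumed as @janus-idp/backstage-plugin-orchestrator-backend, that it exports the same createRouter the standalone dev server later in this patch imports from src/routerWrapper, and that the host keeps the standard logger/reader fields on PluginEnvironment; the file name plugins/orchestrator.ts is illustrative.

// packages/backend/src/plugins/orchestrator.ts (hypothetical wiring, for illustration only)
import { Router } from 'express';

// Assumed export: the dev server in this patch calls the same createRouter from src/routerWrapper.
import { createRouter } from '@janus-idp/backstage-plugin-orchestrator-backend';

import { PluginEnvironment } from '../types';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  // eventBroker and catalogApi are the two fields this patch adds to PluginEnvironment;
  // the remaining options mirror the dev server's createRouter call.
  return await createRouter({
    logger: env.logger,
    config: env.config,
    discovery: env.discovery,
    catalogApi: env.catalogApi,
    urlReader: env.reader,
    eventBroker: env.eventBroker,
  });
}

The host's apiRouter would then mount the returned router, for example under /orchestrator, matching the path the standalone dev server registers.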
diff --git a/plugins/orchestrator-backend/app-config.janus-idp.yaml b/plugins/orchestrator-backend/app-config.janus-idp.yaml new file mode 100644 index 0000000000..ebbe040625 --- /dev/null +++ b/plugins/orchestrator-backend/app-config.janus-idp.yaml @@ -0,0 +1,4 @@ +orchestrator: + sonataFlowService: + baseUrl: http://localhost + port: 8080 diff --git a/plugins/orchestrator-backend/dev/index.ts b/plugins/orchestrator-backend/dev/index.ts new file mode 100644 index 0000000000..211ad07fe9 --- /dev/null +++ b/plugins/orchestrator-backend/dev/index.ts @@ -0,0 +1,51 @@ +import { createServiceBuilder, UrlReader } from '@backstage/backend-common'; +import { CatalogApi } from '@backstage/catalog-client'; +import { Config } from '@backstage/config'; +import { DiscoveryApi } from '@backstage/core-plugin-api'; +import { EventBroker } from '@backstage/plugin-events-node'; + +import { Logger } from 'winston'; + +import { Server } from 'http'; + +import { createRouter } from '../src/routerWrapper'; + +export interface ServerOptions { + port: number; + enableCors: boolean; + logger: Logger; + eventBroker: EventBroker; + config: Config; + discovery: DiscoveryApi; + catalogApi: CatalogApi; + urlReader: UrlReader; +} + +export async function startStandaloneServer( + options: ServerOptions, +): Promise { + const logger = options.logger.child({ service: 'orchestrator-backend' }); + logger.debug('Starting application server...'); + const router = await createRouter({ + logger: logger, + eventBroker: options.eventBroker, + config: options.config, + discovery: options.discovery, + catalogApi: options.catalogApi, + urlReader: options.urlReader, + }); + + let service = createServiceBuilder(module) + .setPort(options.port) + .addRouter('/orchestrator', router); + if (options.enableCors) { + service = service.enableCors({ origin: 'http://localhost:3000' }); + } + + return await service.start().catch(err => { + logger.error(err); + process.exit(1); + }); +} + +module.hot?.accept(); diff --git a/plugins/orchestrator-backend/dist-dynamic/package.json b/plugins/orchestrator-backend/dist-dynamic/package.json new file mode 100644 index 0000000000..b0c7314604 --- /dev/null +++ b/plugins/orchestrator-backend/dist-dynamic/package.json @@ -0,0 +1,94 @@ +{ + "name": "@janus-idp/backstage-plugin-orchestrator-backend-dynamic", + "version": "0.0.1", + "license": "Apache-2.0", + "main": "dist/index.cjs.js", + "types": "dist/index.d.ts", + "publishConfig": { + "access": "public", + "main": "dist/index.cjs.js", + "types": "dist/index.d.ts" + }, + "backstage": { + "role": "backend-plugin" + }, + "exports": { + ".": { + "require": "./dist/index.cjs.js", + "default": "./dist/index.cjs.js" + }, + "./alpha": { + "require": "./dist/alpha.cjs.js", + "default": "./dist/alpha.cjs.js" + }, + "./package.json": "./package.json" + }, + "homepage": "https://janus-idp.io/", + "repository": "github:janus-idp/backstage-plugins", + "bugs": "https://github.com/janus-idp/backstage-plugins/issues", + "keywords": [ + "backstage", + "plugin", + "orchestrator", + "workflows" + ], + "files": [ + "app-config.janus-idp.yaml", + "dist", + "alpha" + ], + "scripts": { + "start": "backstage-cli package start", + "build": "backstage-cli package build", + "tsc": "tsc", + "lint": "backstage-cli package lint", + "test": "backstage-cli package test --passWithNoTests --coverage", + "clean": "backstage-cli package clean", + "prepack": "backstage-cli package prepack", + "postpack": "backstage-cli package postpack", + "export-dynamic": "janus-cli package 
export-dynamic-plugin" + }, + "dependencies": { + "@octokit/rest": "^19.0.3", + "@severlessworkflow/sdk-typescript": "^3.0.3", + "@urql/core": "^4.1.4", + "cloudevents": "^8.0.0", + "express": "^4.18.2", + "express-promise-router": "^4.1.1", + "fs-extra": "^10.1.0", + "json-schema": "^0.4.0", + "openapi-types": "^12.1.3", + "winston": "^3.11.0", + "yn": "^5.0.0", + "js-yaml": "^4.1.0" + }, + "devDependencies": {}, + "bundleDependencies": true, + "peerDependencies": { + "@backstage/backend-app-api": "^0.5.8", + "@backstage/backend-common": "^0.19.8", + "@backstage/backend-plugin-api": "^0.6.6", + "@backstage/backend-plugin-manager": "npm:@janus-idp/backend-plugin-manager@0.0.2-janus.5", + "@backstage/backend-tasks": "^0.5.11", + "@backstage/catalog-client": "^1.4.5", + "@backstage/catalog-model": "^1.4.3", + "@backstage/config": "^1.1.1", + "@backstage/core-plugin-api": "^1.7.0", + "@backstage/integration": "^1.7.1", + "@backstage/plugin-catalog-node": "^1.4.7", + "@backstage/plugin-events-backend": "^0.2.8", + "@backstage/plugin-events-node": "^0.2.8", + "@backstage/plugin-scaffolder-backend": "^1.18.0", + "@backstage/plugin-scaffolder-common": "^1.4.2", + "@backstage/plugin-scaffolder-node": "^0.2.6", + "@backstage/types": "^1.1.1" + }, + "overrides": { + "@aws-sdk/util-utf8-browser": { + "@smithy/util-utf8": "^2.0.0" + } + }, + "resolutions": { + "@aws-sdk/util-utf8-browser": "npm:@smithy/util-utf8@~2" + } +} diff --git a/plugins/orchestrator-backend/dist-dynamic/yarn.lock b/plugins/orchestrator-backend/dist-dynamic/yarn.lock new file mode 100644 index 0000000000..5d891476e8 --- /dev/null +++ b/plugins/orchestrator-backend/dist-dynamic/yarn.lock @@ -0,0 +1,1075 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@0no-co/graphql.web@^1.0.1": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@0no-co/graphql.web/-/graphql.web-1.0.4.tgz#9606eb651955499525d068ce0ad8bea596286ce2" + integrity sha512-W3ezhHGfO0MS1PtGloaTpg0PbaT8aZSmmaerL7idtU5F7oCI+uu25k+MsMS31BVFlp4aMkHSrNRxiD72IlK8TA== + +"@aws-sdk/util-utf8-browser@npm:@smithy/util-utf8@~2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.2.tgz#626b3e173ad137208e27ed329d6bea70f4a1a7f7" + integrity sha512-qOiVORSPm6Ce4/Yu6hbSgNHABLP2VMv8QOC3tTDNHHlWY19pPyc++fBTbZPtx6egPXi4HQxKDnMxVxpbtX2GoA== + dependencies: + "@smithy/util-buffer-from" "^2.0.0" + tslib "^2.5.0" + +"@colors/colors@1.6.0", "@colors/colors@^1.6.0": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0" + integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA== + +"@dabh/diagnostics@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a" + integrity sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA== + dependencies: + colorspace "1.1.x" + enabled "2.0.x" + kuler "^2.0.0" + +"@octokit/auth-token@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-3.0.4.tgz#70e941ba742bdd2b49bdb7393e821dea8520a3db" + integrity sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ== + +"@octokit/core@^4.2.1": + version "4.2.4" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-4.2.4.tgz#d8769ec2b43ff37cc3ea89ec4681a20ba58ef907" + integrity 
sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ== + dependencies: + "@octokit/auth-token" "^3.0.0" + "@octokit/graphql" "^5.0.0" + "@octokit/request" "^6.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" "^9.0.0" + before-after-hook "^2.2.0" + universal-user-agent "^6.0.0" + +"@octokit/endpoint@^7.0.0": + version "7.0.6" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.6.tgz#791f65d3937555141fb6c08f91d618a7d645f1e2" + integrity sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg== + dependencies: + "@octokit/types" "^9.0.0" + is-plain-object "^5.0.0" + universal-user-agent "^6.0.0" + +"@octokit/graphql@^5.0.0": + version "5.0.6" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.6.tgz#9eac411ac4353ccc5d3fca7d76736e6888c5d248" + integrity sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw== + dependencies: + "@octokit/request" "^6.0.0" + "@octokit/types" "^9.0.0" + universal-user-agent "^6.0.0" + +"@octokit/openapi-types@^18.0.0": + version "18.1.1" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-18.1.1.tgz#09bdfdabfd8e16d16324326da5148010d765f009" + integrity sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw== + +"@octokit/plugin-paginate-rest@^6.1.2": + version "6.1.2" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz#f86456a7a1fe9e58fec6385a85cf1b34072341f8" + integrity sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ== + dependencies: + "@octokit/tsconfig" "^1.0.2" + "@octokit/types" "^9.2.3" + +"@octokit/plugin-request-log@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz#5e50ed7083a613816b1e4a28aeec5fb7f1462e85" + integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA== + +"@octokit/plugin-rest-endpoint-methods@^7.1.2": + version "7.2.3" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz#37a84b171a6cb6658816c82c4082ac3512021797" + integrity sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA== + dependencies: + "@octokit/types" "^10.0.0" + +"@octokit/request-error@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-3.0.3.tgz#ef3dd08b8e964e53e55d471acfe00baa892b9c69" + integrity sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ== + dependencies: + "@octokit/types" "^9.0.0" + deprecation "^2.0.0" + once "^1.4.0" + +"@octokit/request@^6.0.0": + version "6.2.8" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.8.tgz#aaf480b32ab2b210e9dadd8271d187c93171d8eb" + integrity sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw== + dependencies: + "@octokit/endpoint" "^7.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" "^9.0.0" + is-plain-object "^5.0.0" + node-fetch "^2.6.7" + universal-user-agent "^6.0.0" + +"@octokit/rest@^19.0.3": + version "19.0.13" + resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.13.tgz#e799393264edc6d3c67eeda9e5bd7832dcf974e4" + integrity 
sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA== + dependencies: + "@octokit/core" "^4.2.1" + "@octokit/plugin-paginate-rest" "^6.1.2" + "@octokit/plugin-request-log" "^1.0.4" + "@octokit/plugin-rest-endpoint-methods" "^7.1.2" + +"@octokit/tsconfig@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@octokit/tsconfig/-/tsconfig-1.0.2.tgz#59b024d6f3c0ed82f00d08ead5b3750469125af7" + integrity sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA== + +"@octokit/types@^10.0.0": + version "10.0.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-10.0.0.tgz#7ee19c464ea4ada306c43f1a45d444000f419a4a" + integrity sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg== + dependencies: + "@octokit/openapi-types" "^18.0.0" + +"@octokit/types@^9.0.0", "@octokit/types@^9.2.3": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-9.3.2.tgz#3f5f89903b69f6a2d196d78ec35f888c0013cac5" + integrity sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA== + dependencies: + "@octokit/openapi-types" "^18.0.0" + +"@severlessworkflow/sdk-typescript@^3.0.3": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@severlessworkflow/sdk-typescript/-/sdk-typescript-3.0.3.tgz#6fcec70e5651db77e0145697c91b8c40808d0526" + integrity sha512-lrIyDa5jI+nfMZg2Q2u70cRJBRGu2FtASVgzci7/MW5YxtTFYGYfc4rRxuMEf3EHVSFCVTKrtCYp4v2rHeLQYw== + dependencies: + ajv "^8.1.0" + js-yaml "^4.1.0" + +"@smithy/is-array-buffer@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" + integrity sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug== + dependencies: + tslib "^2.5.0" + +"@smithy/util-buffer-from@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" + integrity sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw== + dependencies: + "@smithy/is-array-buffer" "^2.0.0" + tslib "^2.5.0" + +"@types/triple-beam@^1.3.2": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c" + integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw== + +"@urql/core@^4.1.4": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@urql/core/-/core-4.2.2.tgz#c2b009373cb9084bbfa8ebc0177c854a76235b84" + integrity sha512-TP1kheq9bnrEdnVbJqh0g0ZY/wfdpPeAzjiiDK+Tm+Pbi0O1Xdu6+fUJ/wJo5QpHZzkIyya4/AecG63e6scFqQ== + dependencies: + "@0no-co/graphql.web" "^1.0.1" + wonka "^6.3.2" + +accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv@^8.0.0, ajv@^8.1.0, 
ajv@^8.11.0: + version "8.12.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +async@^3.2.3: + version "3.2.5" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" + integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== + +available-typed-arrays@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== + +before-after-hook@^2.2.0: + version "2.2.3" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" + integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== + +bignumber.js@^9.0.0: + version "9.1.2" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.2.tgz#b7c4242259c008903b13707983b5f4bbd31eda0c" + integrity sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug== + +body-parser@1.20.1: + version "1.20.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.11.0" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0, call-bind@^1.0.2, call-bind@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" + integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== + dependencies: + function-bind "^1.1.2" + get-intrinsic "^1.2.1" + set-function-length "^1.1.1" + +cloudevents@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/cloudevents/-/cloudevents-8.0.0.tgz#d5e3f9f56fc70aad5a97f3ea272559d8b7d74317" + integrity sha512-G1Z/r8QMFAsP+F1PuZSHzx1ocPy4vrdQMTHD3orjDaM5kccmPU6nMmpVrF07b53aaxcrLbORUmRepY/DgvdhVw== + dependencies: + ajv "^8.11.0" + ajv-formats "^2.1.1" + json-bigint "^1.0.0" + process "^0.11.10" + util "^0.12.4" + uuid 
"^8.3.2" + +color-convert@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color-string@^1.6.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4" + integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg== + dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + +color@^3.1.3: + version "3.2.1" + resolved "https://registry.yarnpkg.com/color/-/color-3.2.1.tgz#3544dc198caf4490c3ecc9a790b54fe9ff45e164" + integrity sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA== + dependencies: + color-convert "^1.9.3" + color-string "^1.6.0" + +colorspace@1.1.x: + version "1.1.4" + resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.4.tgz#8d442d1186152f60453bf8070cd66eb364e59243" + integrity sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w== + dependencies: + color "^3.1.3" + text-hex "1.0.x" + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +debug@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +define-data-property@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic 
"^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +deprecation@^2.0.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" + integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +enabled@2.0.x: + version "2.0.0" + resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2" + integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +express-promise-router@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/express-promise-router/-/express-promise-router-4.1.1.tgz#8fac102060b9bcc868f84d34fbb12fd8fa494291" + integrity sha512-Lkvcy/ZGrBhzkl3y7uYBHLMtLI4D6XQ2kiFg9dq7fbktBch5gjqJ0+KovX0cvCAvTJw92raWunRLM/OM+5l4fA== + dependencies: + is-promise "^4.0.0" + lodash.flattendeep "^4.0.0" + methods "^1.0.0" + +express@^4.18.2: + version "4.18.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.1" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.11.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fecha@^4.2.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd" + integrity sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw== + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +fn.name@1.x.x: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" + integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== + +for-each@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" + integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== + dependencies: + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + +graceful-fs@^4.1.6, graceful-fs@^4.2.0: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity 
sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +has-property-descriptors@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz#52ba30b6c5ec87fd89fa574bc1c39125c6f65340" + integrity sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg== + dependencies: + get-intrinsic "^1.2.2" + +has-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + +has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +hasown@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== + dependencies: + function-bind "^1.1.2" + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +inherits@2.0.4, inherits@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-arguments@^1.0.4: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + +is-callable@^1.1.3: + version "1.2.7" + resolved 
"https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-generator-function@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + dependencies: + has-tostringtag "^1.0.0" + +is-plain-object@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== + +is-promise@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" + integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-typed-array@^1.1.3: + version "1.1.12" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== + dependencies: + which-typed-array "^1.1.11" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +json-bigint@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1" + integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ== + dependencies: + bignumber.js "^9.0.0" + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +kuler@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" + integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== + +lodash.flattendeep@^4.0.0: + version "4.4.0" + resolved 
"https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" + integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ== + +logform@^2.3.2, logform@^2.4.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/logform/-/logform-2.6.0.tgz#8c82a983f05d6eaeb2d75e3decae7a768b2bf9b5" + integrity sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ== + dependencies: + "@colors/colors" "1.6.0" + "@types/triple-beam" "^1.3.2" + fecha "^4.2.0" + ms "^2.1.1" + safe-stable-stringify "^2.3.1" + triple-beam "^1.3.0" + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +methods@^1.0.0, methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + +object-inspect@^1.9.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" + 
integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +one-time@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45" + integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g== + dependencies: + fn.name "1.x.x" + +openapi-types@^12.1.3: + version "12.1.3" + resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3" + integrity sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +punycode@^2.1.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + +qs@6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + dependencies: + side-channel "^1.0.4" + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + 
+readable-stream@^3.4.0, readable-stream@^3.6.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +safe-buffer@5.2.1, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-stable-stringify@^2.3.1: + version "2.4.3" + resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886" + integrity sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g== + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +set-function-length@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" + integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== + dependencies: + define-data-property "^1.1.1" + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +simple-swizzle@^0.2.2: + version "0.2.2" + resolved 
"https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg== + dependencies: + is-arrayish "^0.3.1" + +stack-trace@0.0.x: + version "0.0.10" + resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" + integrity sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg== + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +text-hex@1.0.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" + integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg== + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +triple-beam@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984" + integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg== + +tslib@^2.5.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +universal-user-agent@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" + integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== + +universalify@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" + integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + 
+uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +util-deprecate@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util@^0.12.4: + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== + dependencies: + inherits "^2.0.3" + is-arguments "^1.0.4" + is-generator-function "^1.0.7" + is-typed-array "^1.1.3" + which-typed-array "^1.1.2" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which-typed-array@^1.1.11, which-typed-array@^1.1.2: + version "1.1.13" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" + integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.4" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + +winston-transport@^4.5.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.6.0.tgz#f1c1a665ad1b366df72199e27892721832a19e1b" + integrity sha512-wbBA9PbPAHxKiygo7ub7BYRiKxms0tpfU2ljtWzb3SjRjv5yl6Ozuy/TkXf00HTAt+Uylo3gSkNwzc4ME0wiIg== + dependencies: + logform "^2.3.2" + readable-stream "^3.6.0" + triple-beam "^1.3.0" + +winston@^3.11.0: + version "3.11.0" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.11.0.tgz#2d50b0a695a2758bb1c95279f0a88e858163ed91" + integrity sha512-L3yR6/MzZAOl0DsysUXHVjOwv8mKZ71TrA/41EIduGpOOV5LQVodqN+QdQ6BS6PJ/RdIshZhq84P/fStEZkk7g== + dependencies: + "@colors/colors" "^1.6.0" + "@dabh/diagnostics" "^2.0.2" + async "^3.2.3" + is-stream "^2.0.0" + logform "^2.4.0" + 
one-time "^1.0.0" + readable-stream "^3.4.0" + safe-stable-stringify "^2.3.1" + stack-trace "0.0.x" + triple-beam "^1.3.0" + winston-transport "^4.5.0" + +wonka@^6.3.2: + version "6.3.4" + resolved "https://registry.yarnpkg.com/wonka/-/wonka-6.3.4.tgz#76eb9316e3d67d7febf4945202b5bdb2db534594" + integrity sha512-CjpbqNtBGNAeyNS/9W6q3kSkKE52+FjIj7AkFlLr11s/VWGUu6a2CdYSdGxocIhIVjaW/zchesBQUKPVU69Cqg== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yn@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yn/-/yn-5.0.0.tgz#63fc2e2e0056cf294397eed6ad4a3fbdf707f26f" + integrity sha512-+l37+9TyGEsyxGLaTg6QgYy5KnOp74ZZl4dPFLQpBWSkO99uBC5jnS0pOGwXFViPbiaEtWbParH2KrgWWF2duQ== diff --git a/plugins/orchestrator-backend/package.json b/plugins/orchestrator-backend/package.json new file mode 100644 index 0000000000..0190838149 --- /dev/null +++ b/plugins/orchestrator-backend/package.json @@ -0,0 +1,95 @@ +{ + "name": "@janus-idp/backstage-plugin-orchestrator-backend", + "version": "0.0.1", + "license": "Apache-2.0", + "main": "src/index.ts", + "types": "src/index.ts", + "publishConfig": { + "access": "public", + "main": "dist/index.cjs.js", + "types": "dist/index.d.ts" + }, + "backstage": { + "role": "backend-plugin" + }, + "exports": { + ".": "./src/index.ts", + "./alpha": "./src/alpha.ts", + "./package.json": "./package.json" + }, + "typesVersions": { + "*": { + "alpha": [ + "src/alpha.ts" + ], + "package.json": [ + "package.json" + ] + } + }, + "homepage": "https://janus-idp.io/", + "repository": "github:janus-idp/backstage-plugins", + "bugs": "https://github.com/janus-idp/backstage-plugins/issues", + "keywords": [ + "backstage", + "plugin", + "orchestrator", + "workflows" + ], + "files": [ + "app-config.janus-idp.yaml", + "dist", + "dist-dynamic/*.*", + "dist-dynamic/dist/**", + "dist-dynamic/alpha/*" + ], + "scripts": { + "start": "backstage-cli package start", + "build": "backstage-cli package build", + "tsc": "tsc", + "lint": "backstage-cli package lint", + "test": "backstage-cli package test --passWithNoTests --coverage", + "clean": "backstage-cli package clean", + "prepack": "backstage-cli package prepack", + "postpack": "backstage-cli package postpack", + "export-dynamic": "janus-cli package export-dynamic-plugin" + }, + "dependencies": { + "@backstage/backend-app-api": "^0.5.8", + "@backstage/backend-common": "^0.19.8", + "@backstage/backend-plugin-api": "^0.6.6", + "@backstage/backend-plugin-manager": "npm:@janus-idp/backend-plugin-manager@0.0.2-janus.5", + "@backstage/backend-tasks": "^0.5.11", + "@backstage/catalog-client": "^1.4.5", + "@backstage/catalog-model": "^1.4.3", + "@backstage/config": "^1.1.1", + "@backstage/core-plugin-api": "^1.7.0", + "@backstage/integration": "^1.7.1", + "@backstage/plugin-catalog-node": "^1.4.7", + "@backstage/plugin-events-backend": "^0.2.8", + "@backstage/plugin-events-node": "^0.2.8", + "@backstage/plugin-scaffolder-backend": "^1.18.0", + "@backstage/plugin-scaffolder-common": "^1.4.2", + "@backstage/plugin-scaffolder-node": "^0.2.6", + "@backstage/types": "^1.1.1", + "@janus-idp/backstage-plugin-orchestrator-common": "0.0.1", + "@octokit/rest": "^19.0.3", + "@severlessworkflow/sdk-typescript": "^3.0.3", + "@urql/core": "^4.1.4", + "cloudevents": "^8.0.0", + "express": "^4.18.2", + "express-promise-router": "^4.1.1", + "fs-extra": "^10.1.0", + 
"json-schema": "^0.4.0", + "openapi-types": "^12.1.3", + "winston": "^3.11.0", + "yn": "^5.0.0" + }, + "devDependencies": { + "@backstage/cli": "0.23.0", + "@janus-idp/cli": "1.4.7", + "@types/express": "4.17.20", + "@types/fs-extra": "^11.0.1", + "@types/json-schema": "^7.0.12" + } +} diff --git a/plugins/orchestrator-backend/src/OrchestratorPlugin.ts b/plugins/orchestrator-backend/src/OrchestratorPlugin.ts new file mode 100644 index 0000000000..7941c02a57 --- /dev/null +++ b/plugins/orchestrator-backend/src/OrchestratorPlugin.ts @@ -0,0 +1,44 @@ +import { loggerToWinstonLogger } from '@backstage/backend-common'; +import { + coreServices, + createBackendPlugin, +} from '@backstage/backend-plugin-api'; +import { catalogServiceRef } from '@backstage/plugin-catalog-node/alpha'; +import { DefaultEventBroker } from '@backstage/plugin-events-backend'; + +import { createRouter } from './routerWrapper'; + +export const orchestratorPlugin = createBackendPlugin({ + pluginId: 'orchestrator', + register(env) { + env.registerInit({ + deps: { + logger: coreServices.logger, + config: coreServices.rootConfig, + discovery: coreServices.discovery, + httpRouter: coreServices.httpRouter, + urlReader: coreServices.urlReader, + catalogApi: catalogServiceRef, + }, + async init({ + logger, + config, + discovery, + httpRouter, + catalogApi, + urlReader, + }) { + const log = loggerToWinstonLogger(logger); + const router = await createRouter({ + eventBroker: new DefaultEventBroker(log), + config: config, + logger: log, + discovery: discovery, + catalogApi: catalogApi, + urlReader: urlReader, + }); + httpRouter.use(router); + }, + }); + }, +}); diff --git a/plugins/orchestrator-backend/src/alpha.ts b/plugins/orchestrator-backend/src/alpha.ts new file mode 100644 index 0000000000..3896270d6a --- /dev/null +++ b/plugins/orchestrator-backend/src/alpha.ts @@ -0,0 +1 @@ +export * from './dynamic/alpha'; diff --git a/plugins/orchestrator-backend/src/dynamic/alpha.ts b/plugins/orchestrator-backend/src/dynamic/alpha.ts new file mode 100644 index 0000000000..c9d85c1261 --- /dev/null +++ b/plugins/orchestrator-backend/src/dynamic/alpha.ts @@ -0,0 +1,9 @@ +import { BackendDynamicPluginInstaller } from '@backstage/backend-plugin-manager'; + +import orchestratorModuleEntityProvider from '../module'; +import { orchestratorPlugin } from '../OrchestratorPlugin'; + +export const dynamicPluginInstaller: BackendDynamicPluginInstaller = { + kind: 'new', + install: () => [orchestratorPlugin(), orchestratorModuleEntityProvider()], +}; diff --git a/plugins/orchestrator-backend/src/dynamic/index.ts b/plugins/orchestrator-backend/src/dynamic/index.ts new file mode 100644 index 0000000000..688e26e164 --- /dev/null +++ b/plugins/orchestrator-backend/src/dynamic/index.ts @@ -0,0 +1,35 @@ +import { HostDiscovery } from '@backstage/backend-app-api'; +import { BackendDynamicPluginInstaller } from '@backstage/backend-plugin-manager'; +import { CatalogClient } from '@backstage/catalog-client'; + +import { OrchestratorEntityProvider } from '../provider'; +import { createRouter } from '../routerWrapper'; + +export const dynamicPluginInstaller: BackendDynamicPluginInstaller = { + kind: 'legacy', + router: { + pluginID: 'orchestrator', + createPlugin: async env => { + const catalogApi = new CatalogClient({ + discoveryApi: HostDiscovery.fromConfig(env.config), + }); + return createRouter({ + ...env, + urlReader: env.reader, + catalogApi, + }); + }, + }, + async catalog(builder, env) { + const isIntegrationEnabled = !!env.config.getOptionalBoolean( + 
'orchestrator.catalog.isEnabled', + ); + if (!isIntegrationEnabled) { + env.logger.info('The integration with the Catalog plugin is disabled.'); + return; + } + builder.addEntityProvider( + await OrchestratorEntityProvider.fromConfig({ ...env }), + ); + }, +}; diff --git a/plugins/orchestrator-backend/src/helpers/errorBuilder.ts b/plugins/orchestrator-backend/src/helpers/errorBuilder.ts new file mode 100644 index 0000000000..32b5070917 --- /dev/null +++ b/plugins/orchestrator-backend/src/helpers/errorBuilder.ts @@ -0,0 +1,34 @@ +export const NO_DATA_INDEX_URL = 'NO_DATA_INDEX_URL'; +export const NO_BACKEND_EXEC_CTX = 'NO_BACKEND_EXEC_CTX'; +export const NO_CLIENT_PROVIDED = 'NO_CLIENT_PROVIDED'; +export const NO_LOGGER = 'NO_LOGGER'; +export const SWF_BACKEND_NOT_INITED = 'SWF_BACKEND_NOT_INITED'; + +export class ErrorBuilder { + public static NewBackendError(name: string, message: string): Error { + const e = new Error(message); + e.name = name; + return e; + } + + public static GET_NO_DATA_INDEX_URL_ERR(): Error { + return this.NewBackendError( + NO_DATA_INDEX_URL, + 'No data index url specified or found', + ); + } + + public static GET_NO_CLIENT_PROVIDED_ERR(): Error { + return this.NewBackendError( + NO_CLIENT_PROVIDED, + 'No or null graphql client', + ); + } + + public static GET_SWF_BACKEND_NOT_INITED(): Error { + return this.NewBackendError( + SWF_BACKEND_NOT_INITED, + 'The SonataFlow backend is not initialized, call initialize() method before trying to get the workflows.', + ); + } +} diff --git a/plugins/orchestrator-backend/src/index.ts b/plugins/orchestrator-backend/src/index.ts new file mode 100644 index 0000000000..f764df1ba0 --- /dev/null +++ b/plugins/orchestrator-backend/src/index.ts @@ -0,0 +1,4 @@ +export * from './dynamic/index'; + +export { createRouter } from './routerWrapper'; +export { OrchestratorEntityProvider } from './provider'; diff --git a/plugins/orchestrator-backend/src/module/OrchestratorModuleEntityProvider.ts b/plugins/orchestrator-backend/src/module/OrchestratorModuleEntityProvider.ts new file mode 100644 index 0000000000..894e782576 --- /dev/null +++ b/plugins/orchestrator-backend/src/module/OrchestratorModuleEntityProvider.ts @@ -0,0 +1,44 @@ +import { loggerToWinstonLogger } from '@backstage/backend-common'; +import { + coreServices, + createBackendModule, +} from '@backstage/backend-plugin-api'; +import { catalogProcessingExtensionPoint } from '@backstage/plugin-catalog-node/alpha'; +import { DefaultEventBroker } from '@backstage/plugin-events-backend'; + +import { OrchestratorEntityProvider } from '../provider'; + +export const orchestratorModuleEntityProvider = createBackendModule({ + pluginId: 'catalog', + moduleId: 'orchestrator-entity-provider', + register(reg) { + reg.registerInit({ + deps: { + logger: coreServices.logger, + config: coreServices.rootConfig, + discovery: coreServices.discovery, + scheduler: coreServices.scheduler, + catalog: catalogProcessingExtensionPoint, + }, + async init({ logger, config, discovery, scheduler, catalog }) { + const isIntegrationEnabled = !!config.getOptionalBoolean( + 'orchestrator.catalog.isEnabled', + ); + if (!isIntegrationEnabled) { + logger.info('The integration with the Catalog plugin is disabled.'); + return; + } + const winstonLogger = loggerToWinstonLogger(logger); + const eventBroker = new DefaultEventBroker(winstonLogger); + const provider = await OrchestratorEntityProvider.fromConfig({ + config, + discovery, + logger: winstonLogger, + scheduler, + }); + eventBroker.subscribe(provider); + 
catalog.addEntityProvider(provider); + }, + }); + }, +}); diff --git a/plugins/orchestrator-backend/src/module/index.ts b/plugins/orchestrator-backend/src/module/index.ts new file mode 100644 index 0000000000..362f568def --- /dev/null +++ b/plugins/orchestrator-backend/src/module/index.ts @@ -0,0 +1 @@ +export { orchestratorModuleEntityProvider as default } from './OrchestratorModuleEntityProvider'; diff --git a/plugins/orchestrator-backend/src/provider/OrchestratorEntityProvider.ts b/plugins/orchestrator-backend/src/provider/OrchestratorEntityProvider.ts new file mode 100644 index 0000000000..fcb7e88c2b --- /dev/null +++ b/plugins/orchestrator-backend/src/provider/OrchestratorEntityProvider.ts @@ -0,0 +1,172 @@ +import { PluginTaskScheduler } from '@backstage/backend-tasks'; +import { + ANNOTATION_LOCATION, + ANNOTATION_ORIGIN_LOCATION, + ANNOTATION_SOURCE_LOCATION, + ANNOTATION_VIEW_URL, + Entity, +} from '@backstage/catalog-model'; +import { Config } from '@backstage/config'; +import { DiscoveryApi } from '@backstage/core-plugin-api'; +import { + EntityProvider, + EntityProviderConnection, +} from '@backstage/plugin-catalog-node'; +import { EventParams, EventSubscriber } from '@backstage/plugin-events-node'; +import { TemplateEntityV1beta3 } from '@backstage/plugin-scaffolder-common'; + +import { Logger } from 'winston'; + +import { + DEFAULT_CATALOG_ENVIRONMENT, + DEFAULT_CATALOG_OWNER, + getWorkflowCategory, + ORCHESTRATOR_SERVICE_READY_TOPIC, + WORKFLOW_TYPE, + WorkflowCategory, + WorkflowItem, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +export class OrchestratorEntityProvider + implements EntityProvider, EventSubscriber +{ + private connection: EntityProviderConnection | undefined; + + private readonly scheduler: PluginTaskScheduler; + private readonly logger: Logger; + + private readonly owner: string; + private readonly environment: string; + + private readonly orchestratorPluginUrl: string; + + static async fromConfig(args: { + config: Config; + logger: Logger; + scheduler: PluginTaskScheduler; + discovery: DiscoveryApi; + }): Promise { + const owner = + args.config.getOptionalString('orchestrator.catalog.owner') ?? + DEFAULT_CATALOG_OWNER; + const environment = + args.config.getOptionalString('orchestrator.catalog.environment') ?? 
+ DEFAULT_CATALOG_ENVIRONMENT; + + const orchestratorPluginUrl = + await args.discovery.getBaseUrl('orchestrator'); + + return new OrchestratorEntityProvider({ + orchestratorPluginUrl, + scheduler: args.scheduler, + logger: args.logger, + owner, + environment, + }); + } + + constructor(args: { + orchestratorPluginUrl: string; + scheduler: PluginTaskScheduler; + logger: Logger; + owner: string; + environment: string; + }) { + this.orchestratorPluginUrl = args.orchestratorPluginUrl; + this.scheduler = args.scheduler; + this.owner = args.owner; + this.logger = args.logger; + this.environment = args.environment; + } + + getProviderName(): string { + return OrchestratorEntityProvider.name; + } + + supportsEventTopics(): string[] { + return [ORCHESTRATOR_SERVICE_READY_TOPIC]; + } + + async connect(connection: EntityProviderConnection): Promise { + this.connection = connection; + + this.scheduler.scheduleTask({ + id: `${this.getProviderName()}__task`, + fn: async () => { + await this.run(); + }, + frequency: { seconds: 5 }, + timeout: { minutes: 10 }, + }); + } + + async onEvent(params: EventParams): Promise { + if (params.topic !== ORCHESTRATOR_SERVICE_READY_TOPIC) { + return; + } + await this.run(); + } + + async run() { + if (!this.connection) { + return; + } + + this.logger.info('Retrieving workflow definitions'); + + try { + const svcResponse = await fetch( + `${this.orchestratorPluginUrl}/workflows`, + ); + const json = await svcResponse.json(); + const items = json.items as WorkflowItem[]; + + const entities: Entity[] = items?.length + ? this.workflowToTemplateEntities(items) + : []; + + await this.connection.applyMutation({ + type: 'full', + entities: entities.map(entity => ({ + entity, + locationKey: `${this.getProviderName()}:${this.environment}`, + })), + }); + } catch (e) { + this.logger.error('Error retrieving workflow definitions', e); + } + } + + private workflowToTemplateEntities( + items: WorkflowItem[], + ): TemplateEntityV1beta3[] { + return items + .filter(i => i.serviceUrl) + .map(i => { + const sanitizedId = i.definition.id.replace(/ /g, '_'); + const category: WorkflowCategory = getWorkflowCategory(i.definition); + + return { + apiVersion: 'scaffolder.backstage.io/v1beta3', + kind: 'Template', + metadata: { + name: sanitizedId, + title: i.definition.name, + description: i.definition.description, + tags: [category], + annotations: { + [ANNOTATION_LOCATION]: `url:${i.serviceUrl}`, + [ANNOTATION_ORIGIN_LOCATION]: `url:${i.serviceUrl}`, + [ANNOTATION_SOURCE_LOCATION]: `url:${i.serviceUrl}/management/processes/${sanitizedId}/source`, + [ANNOTATION_VIEW_URL]: `${i.serviceUrl}/management/processes/${sanitizedId}/source`, + }, + }, + spec: { + owner: this.owner, + type: WORKFLOW_TYPE, + steps: [], + }, + }; + }); + } +} diff --git a/plugins/orchestrator-backend/src/provider/index.ts b/plugins/orchestrator-backend/src/provider/index.ts new file mode 100644 index 0000000000..50c275b660 --- /dev/null +++ b/plugins/orchestrator-backend/src/provider/index.ts @@ -0,0 +1 @@ +export { OrchestratorEntityProvider } from './OrchestratorEntityProvider'; diff --git a/plugins/orchestrator-backend/src/routerWrapper/index.ts b/plugins/orchestrator-backend/src/routerWrapper/index.ts new file mode 100644 index 0000000000..a9fc7fc6af --- /dev/null +++ b/plugins/orchestrator-backend/src/routerWrapper/index.ts @@ -0,0 +1,57 @@ +import { UrlReader } from '@backstage/backend-common'; +import { CatalogApi } from '@backstage/catalog-client'; +import { Config } from '@backstage/config'; +import { 
DiscoveryApi } from '@backstage/core-plugin-api'; +import { EventBroker } from '@backstage/plugin-events-node'; + +import express from 'express'; +import { Logger } from 'winston'; + +import { DataIndexService } from '../service/DataIndexService'; +import { createBackendRouter } from '../service/router'; +import { SonataFlowService } from '../service/SonataFlowService'; + +export interface RouterArgs { + eventBroker: EventBroker; + config: Config; + logger: Logger; + discovery: DiscoveryApi; + catalogApi: CatalogApi; + urlReader: UrlReader; +} + +export async function createRouter(args: RouterArgs): Promise { + const dataIndexService = initDataIndexService(args.logger, args.config); + const sonataFlowService = new SonataFlowService( + args.config, + dataIndexService, + args.logger, + ); + + const router = await createBackendRouter({ + eventBroker: args.eventBroker, + config: args.config, + logger: args.logger, + discovery: args.discovery, + catalogApi: args.catalogApi, + urlReader: args.urlReader, + sonataFlowService, + dataIndexService, + }); + + const isSonataFlowUp = await sonataFlowService.connect(); + + if (!isSonataFlowUp) { + args.logger.error('SonataFlow is not up. Check your configuration.'); + } + + return router; +} + +function initDataIndexService( + logger: Logger, + config: Config, +): DataIndexService { + const dataIndexUrl = config.getString('orchestrator.dataIndexService.url'); + return new DataIndexService(dataIndexUrl, logger); +} diff --git a/plugins/orchestrator-backend/src/run.ts b/plugins/orchestrator-backend/src/run.ts new file mode 100644 index 0000000000..0172145955 --- /dev/null +++ b/plugins/orchestrator-backend/src/run.ts @@ -0,0 +1,42 @@ +import { + getRootLogger, + HostDiscovery, + loadBackendConfig, + UrlReaders, +} from '@backstage/backend-common'; +import { CatalogClient } from '@backstage/catalog-client'; +import { DefaultEventBroker } from '@backstage/plugin-events-backend'; + +import yn from 'yn'; + +import { startStandaloneServer } from '../dev'; + +const port = process.env.PLUGIN_PORT ? 
Number(process.env.PLUGIN_PORT) : 7007; +const enableCors = yn(process.env.PLUGIN_CORS, { default: false }); +const logger = getRootLogger(); +const config = await loadBackendConfig({ logger, argv: process.argv }); +const eventBroker = new DefaultEventBroker(logger); +const discovery = HostDiscovery.fromConfig(config); +const catalogApi = new CatalogClient({ + discoveryApi: HostDiscovery.fromConfig(config), +}); +const urlReader = UrlReaders.default({ logger, config }); + +startStandaloneServer({ + port, + enableCors, + logger, + eventBroker, + config, + discovery, + catalogApi, + urlReader, +}).catch(err => { + logger.error(err); + process.exit(1); +}); + +process.on('SIGINT', () => { + logger.info('CTRL+C pressed; exiting.'); + process.exit(0); +}); diff --git a/plugins/orchestrator-backend/src/service/CloudEventService.ts b/plugins/orchestrator-backend/src/service/CloudEventService.ts new file mode 100644 index 0000000000..140895a1a3 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/CloudEventService.ts @@ -0,0 +1,36 @@ +import { CloudEvent, emitterFor, httpTransport } from 'cloudevents'; +import { Logger } from 'winston'; + +export type CloudEventResponse = + | { success: true } + | { success: false; error: string }; + +export class CloudEventService { + constructor(private readonly logger: Logger) {} + + public async send(args: { + event: CloudEvent; + endpoint?: string; + }): Promise { + try { + if (!args.endpoint) { + throw new Error('Endpoint is required'); + } + const targetUrl = args.endpoint; + this.logger.info( + `Sending CloudEvent to ${targetUrl} with data ${JSON.stringify( + args.event, + )}`, + ); + const emit = emitterFor(httpTransport(targetUrl)); + await emit(args.event); + return { success: true }; + } catch (e: any) { + this.logger.error(e); + return { + success: false, + error: e.message, + }; + } + } +} diff --git a/plugins/orchestrator-backend/src/service/DataIndexService.ts b/plugins/orchestrator-backend/src/service/DataIndexService.ts new file mode 100644 index 0000000000..2240a3dc8a --- /dev/null +++ b/plugins/orchestrator-backend/src/service/DataIndexService.ts @@ -0,0 +1,225 @@ +import { Client, fetchExchange, gql } from '@urql/core'; +import { Logger } from 'winston'; + +import { + fromWorkflowSource, + getWorkflowCategory, + Job, + ProcessInstance, + WorkflowDefinition, + WorkflowInfo, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { ErrorBuilder } from '../helpers/errorBuilder'; + +export class DataIndexService { + private client: Client; + + public constructor( + private readonly dataIndexUrl: string, + private readonly logger: Logger, + ) { + if (!dataIndexUrl.length) { + throw ErrorBuilder.GET_NO_DATA_INDEX_URL_ERR(); + } + + this.client = this.getNewGraphQLClient(); + this.logger.info('DataIndexService Initialized'); + } + + private getNewGraphQLClient(): Client { + const diURL = `${this.dataIndexUrl}/graphql`; + return new Client({ + url: diURL, + exchanges: [fetchExchange], + }); + } + + public async abortWorkflowInstance(workflowId: string) { + this.logger.info(`Aborting workflow instance ${workflowId}`); + const ProcessInstanceAbortMutationDocument = gql` + mutation ProcessInstanceAbortMutation($id: String) { + ProcessInstanceAbort(id: $id) + } + `; + + const result = await this.client + .mutation(ProcessInstanceAbortMutationDocument, { id: workflowId }) + .toPromise(); + + if (result.error) { + this.logger.error( + `Error aborting workflow instance ${workflowId}: ${result.error}`, + ); + } + + 
this.logger.debug(`Successfully aborted workflow instance ${workflowId}`); + return result; + } + + public async getWorkflowDefinition( + definitionId: string, + ): Promise<WorkflowInfo> { + const graphQlQuery = `{ ProcessDefinitions ( where: {id: {equal: "${definitionId}" } } ) { id, name, version, type, endpoint, serviceUrl, source } }`; + + const result = await this.client.query(graphQlQuery, {}); + + if (result.error) { + this.logger.error(`Error fetching workflow definition: ${result.error}`); + throw result.error; + } + return (result.data.ProcessDefinitions as WorkflowInfo[])[0]; + } + + public async getWorkflowDefinitions(): Promise<WorkflowInfo[]> { + const QUERY = ` + query ProcessDefinitions { + ProcessDefinitions { + id + name + version + type + endpoint + serviceUrl + } + } + `; + + this.logger.info(`getWorkflowDefinitions() called: ${this.dataIndexUrl}`); + const result = await this.client.query(QUERY, {}); + + if (result.error) { + this.logger.error( + `Error fetching workflow definitions from data index: ${result.error}`, + ); + throw result.error; + } + + return result.data.ProcessDefinitions; + } + + public async fetchProcessInstances(): Promise<ProcessInstance[]> { + const graphQlQuery = + '{ ProcessInstances ( orderBy: { start: ASC }, where: {processId: {isNull: false} } ) { id, processName, processId, businessKey, state, start, lastUpdate, end, nodes { id }, variables, parentProcessInstance {id, processName, businessKey} } }'; + + const response = await this.client.query(graphQlQuery, {}); + + if (response.error) { + this.logger.error(`Error when fetching instances: ${response.error}`); + throw response.error; + } + + const processInstancesSrc = response.data + .ProcessInstances as ProcessInstance[]; + + const processInstances = await Promise.all( + processInstancesSrc.map(async instance => { + return await this.getWorkflowDefinitionFromInstance(instance); + }), + ); + return processInstances; + } + + private async getWorkflowDefinitionFromInstance(instance: ProcessInstance) { + const workflowItem: WorkflowInfo = await this.getWorkflowDefinition( + instance.processId, + ); + if (!workflowItem?.source) { + throw new Error( + `Workflow definition is required to fetch instance ${instance.id}`, + ); + } + const workflowDefinitionSrc: WorkflowDefinition = fromWorkflowSource( + workflowItem.source, + ); + if (workflowItem) { + instance.category = getWorkflowCategory(workflowDefinitionSrc); + instance.description = workflowItem.description; + } + return instance; + } + + public async fetchWorkflowSource( + workflowId: string, + ): Promise<string | undefined> { + const graphQlQuery = `{ ProcessDefinitions ( where: {id: {equal: "${workflowId}" } } ) { id, source } }`; + + const response = await this.client.query(graphQlQuery, {}); + + if (response.error) { + this.logger.error( + `Error when fetching workflow source: ${response.error}`, + ); + return undefined; + } + + return response.data.ProcessDefinitions[0].source; + } + + public async fetchWorkflowInstances( + workflowId: string, + limit: number, + offset: number, + ): Promise<ProcessInstance[]> { + const graphQlQuery = `{ ProcessInstances(where: {processId: {equal: "${workflowId}" } }, pagination: {limit: ${limit}, offset: ${offset}}) { processName, state, start, lastUpdate, end } }`; + + const result = await this.client.query(graphQlQuery, {}); + + if (result.error) { + this.logger.error( + `Error when fetching workflow instances: ${result.error}`, + ); + throw result.error; + } + + return result.data.ProcessInstances; + } + + public async fetchProcessInstanceJobs( + instanceId: string, + ): Promise<Job[]> { + const graphQlQuery = 
`{ Jobs (where: { processInstanceId: { equal: "${instanceId}" } }) { id, processId, processInstanceId, rootProcessId, status, expirationTime, priority, callbackEndpoint, repeatInterval, repeatLimit, scheduledId, retries, lastUpdate, endpoint, nodeInstanceId, executionCounter } }`; + + const result = await this.client.query(graphQlQuery, {}); + + if (result.error) { + this.logger.error(`Error when fetching process instance jobs: ${result.error}`); + throw result.error; + } + + return result.data.Jobs; + } + + public async fetchProcessInstance( + instanceId: string, + ): Promise<ProcessInstance> { + const graphQlQuery = `{ ProcessInstances (where: { id: {equal: "${instanceId}" } } ) { id, processName, processId, businessKey, state, start, lastUpdate, end, nodes { id, nodeId, definitionId, type, name, enter, exit }, variables, parentProcessInstance {id, processName, businessKey}, error { nodeDefinitionId, message} } }`; + + const result = await this.client.query(graphQlQuery, {}); + + if (result.error) { + this.logger.error( + `Error when fetching process instances: ${result.error}`, + ); + throw result.error; + } + + const instance = (result.data.ProcessInstances as ProcessInstance[])[0]; + const workflowItem: WorkflowInfo = await this.getWorkflowDefinition( + instance.processId, + ); + if (!workflowItem?.source) { + throw new Error( + `Workflow definition is required to fetch instance ${instance.id}`, + ); + } + const workflowDefinitionSrc: WorkflowDefinition = fromWorkflowSource( + workflowItem.source, + ); + if (workflowItem) { + instance.category = getWorkflowCategory(workflowDefinitionSrc); + instance.description = workflowDefinitionSrc.description; + } + return instance; + } +} diff --git a/plugins/orchestrator-backend/src/service/DataInputSchemaService.ts b/plugins/orchestrator-backend/src/service/DataInputSchemaService.ts new file mode 100644 index 0000000000..67ee85a661 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/DataInputSchemaService.ts @@ -0,0 +1,1236 @@ +import { Octokit } from '@octokit/rest'; +import { Specification } from '@severlessworkflow/sdk-typescript'; +import { Callbackstate } from '@severlessworkflow/sdk-typescript/lib/definitions/callbackstate'; +import { Databasedswitchstate } from '@severlessworkflow/sdk-typescript/lib/definitions/databasedswitchstate'; +import { Eventstate } from '@severlessworkflow/sdk-typescript/lib/definitions/eventstate'; +import { Foreachstate } from '@severlessworkflow/sdk-typescript/lib/definitions/foreachstate'; +import { Injectstate } from '@severlessworkflow/sdk-typescript/lib/definitions/injectstate'; +import { Operationstate } from '@severlessworkflow/sdk-typescript/lib/definitions/operationstate'; +import { Parallelstate } from '@severlessworkflow/sdk-typescript/lib/definitions/parallelstate'; +import { Sleepstate } from '@severlessworkflow/sdk-typescript/lib/definitions/sleepstate'; +import { Transitiondatacondition } from '@severlessworkflow/sdk-typescript/lib/definitions/transitiondatacondition'; +import { Switchstate } from '@severlessworkflow/sdk-typescript/lib/definitions/types'; +import { JSONSchema4, JSONSchema7 } from 'json-schema'; +import { OpenAPIV3 } from 'openapi-types'; +import { Logger } from 'winston'; + +import { WorkflowDefinition } from '@janus-idp/backstage-plugin-orchestrator-common'; + +type OpenApiSchemaProperties = { + [k: string]: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject; +}; + +interface WorkflowFunctionArgs { + [x: string]: any; +} + +interface WorkflowActionDescriptor { + owner: string; + descriptor: 
string; + action: Specification.Action; +} + +type WorkflowState = + | Sleepstate + | Eventstate + | Operationstate + | Parallelstate + | Switchstate + | Injectstate + | Foreachstate + | Callbackstate; + +interface GitHubPath { + owner: string; + repo: string; + ref: string; + path: string; +} + +interface GitTreeItem { + path: string; + type: 'blob' | 'tree'; +} + +interface FileData { + content: string; + encoding: BufferEncoding; +} + +interface JsonSchemaFile { + owner: string; + fileName: string; + jsonSchema: JSONSchema4; +} + +interface ComposedJsonSchema { + compositionSchema: JsonSchemaFile; + actionSchemas: JsonSchemaFile[]; +} + +interface ScaffolderTemplate { + url: string; + values: string[]; +} + +interface WorkflowFunction { + operationId: string; + ref: Specification.Functionref; + schema: OpenAPIV3.SchemaObject; +} + +const JSON_SCHEMA_VERSION = 'http://json-schema.org/draft-04/schema#'; +const FETCH_TEMPLATE_ACTION_OPERATION_ID = 'fetch:template'; + +const Regex = { + VALUES_IN_SKELETON: /\{\{[%-]?\s*values\.(\w+)\s*[%-]?}}/gi, + CONDITION_IN_SKELETON: /\{%-?\s*if\s*values\.(\w+)\s*(?:%-?})?/gi, + GITHUB_URL: + /^https:\/\/github\.com\/([^/]+)\/([^/]+)\/(?:tree|blob)\/([^/]+)\/(.+)$/, + GITHUB_API_URL: + /^https:\/\/api\.github\.com\/repos\/([^/]+)\/([^/]+)\/contents\/(.+)\?ref=(.+)$/, + NAIVE_ARG_IN_JQ: /^\$\{[^}]*}$|^(\.[^\s.{]+)(?!\.)$/, + NON_ALPHA_NUMERIC: /[^a-zA-Z0-9]+/g, + SNAKE_CASE: /_([a-z])/g, + CAMEL_CASE: /([A-Z])/g, +} as const; + +export class DataInputSchemaService { + private readonly octokit: Octokit; + private readonly decoder = new TextDecoder('utf-8'); + + constructor( + private readonly logger: Logger, + githubToken: string | undefined, + ) { + this.octokit = new Octokit({ auth: githubToken }); + } + + private resolveObject(obj: T | undefined): T { + return { ...(obj ?? {}) } as T; + } + + private resolveAnyToArray(value: any): T[] { + if (Array.isArray(value)) { + return this.resolveArray(value); + } + return []; + } + + private resolveArray(arr: T[] | undefined): T[] { + return [...(arr ?? [])] as T[]; + } + + private resolveObjectIfNotEmpty(obj: T | undefined): T | undefined { + return Object.keys(obj ?? {}).length ? obj : undefined; + } + + private resolveArrayIfNotEmpty(arr: T[]): T[] | undefined { + return arr.length ? arr : undefined; + } + + private resolveTransitionName(d: Transitiondatacondition): string { + return typeof d.transition === 'string' + ? 
d.transition + : d.transition.nextState; + } + + private resolveSingleConditionalSchema( + conditionalActionSchemas: JsonSchemaFile[], + ): JsonSchemaFile | undefined { + if ( + conditionalActionSchemas.length === 1 && + this.resolveObjectIfNotEmpty( + conditionalActionSchemas[0].jsonSchema.properties, + ) + ) { + return conditionalActionSchemas[0]; + } + return undefined; + } + + public async generate(args: { + definition: WorkflowDefinition; + openApi: OpenAPIV3.Document; + }): Promise { + const workflow = args.definition as Specification.Workflow; + + const actionSchemas: JsonSchemaFile[] = []; + const workflowArgsMap = new Map(); + + const stateHandled = new Set(); + + for (const state of workflow.states) { + if (stateHandled.has(state.name!)) { + continue; + } + + stateHandled.add(state.name!); + + if (state.type === 'switch') { + const dataConditions = (state as Databasedswitchstate).dataConditions; + + const conditionalStateNames = dataConditions + ?.filter(d => (d as Transitiondatacondition).transition) + .map(d => this.resolveTransitionName(d as Transitiondatacondition)); + + const conditionalStates = workflow.states.filter(s => + conditionalStateNames.includes(s.name!), + ); + + const { conditionalActionSchemas, conditionalStatesHandled } = + await this.extractConditionalSchemas({ + workflow, + openApi: args.openApi, + conditionalStates, + workflowArgsMap, + }); + + conditionalStatesHandled.forEach(s => stateHandled.add(s)); + + if (conditionalActionSchemas.length <= 1) { + const singleConditionalSchema = this.resolveSingleConditionalSchema( + conditionalActionSchemas, + ); + + if (singleConditionalSchema) { + actionSchemas.push(singleConditionalSchema); + } + + continue; + } + + const conditionalDescriptor = this.buildActionDescriptor({ + stateName: state.name, + }); + const oneOfSchema = this.buildJsonSchemaSkeleton({ + owner: state.name, + workflowId: workflow.id, + title: conditionalDescriptor, + filename: this.sanitizeText({ + text: conditionalDescriptor, + placeholder: '_', + }), + }); + + oneOfSchema.jsonSchema.oneOf = conditionalActionSchemas.map(s => ({ + title: s.owner, + type: 'object', + properties: { + [s.owner]: { type: 'boolean', default: true }, + ...s.jsonSchema.properties, + }, + required: [ + s.owner, + ...this.resolveAnyToArray(s.jsonSchema.required), + ], + })); + + actionSchemas.push(oneOfSchema); + } else { + const actionSchemasFromState = await this.extractSchemasFromStates({ + workflow: workflow, + openApi: args.openApi, + state, + workflowArgsMap, + }); + + actionSchemas.push(...actionSchemasFromState); + } + } + + const variableSetSchema = this.extractAdditionalSchemaFromWorkflow({ + workflow, + workflowArgsMap, + }); + + if (variableSetSchema) { + actionSchemas.push(variableSetSchema); + } + + if (!actionSchemas.length) { + return null; + } + + const compositionSchema = this.buildJsonSchemaSkeleton({ + workflowId: workflow.id, + title: 'Data Input Schema', + }); + + actionSchemas.forEach(actionSchema => { + compositionSchema.jsonSchema.properties = { + ...this.resolveObject(compositionSchema.jsonSchema.properties), + [`${actionSchema.fileName}`]: { + $ref: actionSchema.fileName, + type: actionSchema.jsonSchema.type, + description: actionSchema.jsonSchema.description, + }, + }; + }); + + return { compositionSchema, actionSchemas }; + } + + private extractAdditionalSchemaFromWorkflow(args: { + workflow: Specification.Workflow; + workflowArgsMap: Map; + }): JsonSchemaFile | undefined { + const workflowVariableSet = this.extractVariablesFromWorkflow( + 
args.workflow, + ); + if (!workflowVariableSet.size || !args.workflow.states.length) { + return undefined; + } + + const additionalInputTitle = 'Additional input data'; + const variableSetSchema = this.buildJsonSchemaSkeleton({ + owner: 'Workflow', + workflowId: args.workflow.id, + title: additionalInputTitle, + filename: this.sanitizeText({ + text: additionalInputTitle, + placeholder: '_', + }), + }); + + Array.from(workflowVariableSet) + .filter(v => !args.workflowArgsMap.get(v)) + .forEach(item => { + variableSetSchema.jsonSchema.properties = { + ...this.resolveObject(variableSetSchema.jsonSchema.properties), + [item]: { + title: item, + type: 'string', + description: 'Extracted from the Workflow definition', + }, + }; + }); + + if ( + !this.resolveObjectIfNotEmpty(variableSetSchema.jsonSchema.properties) + ) { + return undefined; + } + + return variableSetSchema; + } + + private async extractConditionalSchemas(args: { + workflow: Specification.Workflow; + openApi: OpenAPIV3.Document; + conditionalStates: WorkflowState[]; + workflowArgsMap: Map<string, string>; + }): Promise<{ + conditionalActionSchemas: JsonSchemaFile[]; + conditionalStatesHandled: Set<string>; + }> { + const conditionalActionSchemas: JsonSchemaFile[] = []; + const conditionalStatesHandled = new Set<string>(); + + for (const conditionalState of args.conditionalStates) { + conditionalStatesHandled.add(conditionalState.name!); + + const conditionalSchemas = await this.extractConditionalSchemasFromState({ + workflow: args.workflow, + openApi: args.openApi, + conditionalState, + workflowArgsMap: args.workflowArgsMap, + }); + + conditionalActionSchemas.push(...conditionalSchemas); + } + + return { conditionalActionSchemas, conditionalStatesHandled }; + } + + private async extractConditionalSchemasFromState(args: { + workflow: Specification.Workflow; + openApi: OpenAPIV3.Document; + conditionalState: WorkflowState; + workflowArgsMap: Map<string, string>; + }): Promise<JsonSchemaFile[]> { + const schemas: JsonSchemaFile[] = []; + + const actions = this.extractActionsFromState(args.conditionalState); + + for (const actionDescriptor of actions) { + const result = await this.extractSchemaFromState({ + workflow: args.workflow, + openApi: args.openApi, + actionDescriptor, + workflowArgsMap: args.workflowArgsMap, + isConditional: true, + }); + + if (!result) { + continue; + } + + schemas.push(result.actionSchema); + } + + return schemas; + } + + private async extractTemplateFromSkeletonUrl(args: { + url: string; + isConditional: boolean; + }): Promise<ScaffolderTemplate | undefined> { + const githubPath = this.convertToGitHubApiUrl(args.url); + if (!githubPath) { + return undefined; + } + + const skeletonValues = + await this.extractTemplateValuesFromSkeletonUrl(githubPath); + + if (!skeletonValues && !args.isConditional) { + return undefined; + } + + const fixedSkeletonUrl = `https://github.com/${githubPath.owner}/${githubPath.repo}/tree/${githubPath.ref}/${githubPath.path}`; + return { + values: skeletonValues, + url: fixedSkeletonUrl, + }; + } + + private async extractSchemasFromStates(args: { + workflow: Specification.Workflow; + openApi: OpenAPIV3.Document; + state: WorkflowState; + workflowArgsMap: Map<string, string>; + }): Promise<JsonSchemaFile[]> { + const schemas: JsonSchemaFile[] = []; + for (const actionDescriptor of this.extractActionsFromState(args.state)) { + const result = await this.extractSchemaFromState({ + workflow: args.workflow, + openApi: args.openApi, + actionDescriptor, + workflowArgsMap: args.workflowArgsMap, + isConditional: false, + }); + + if (!result) { + continue; + } + + const { actionSchema, argsMap } = result; + +
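// The argument map collected for this action is merged into the shared + // workflow-level map so later actions can skip arguments already captured + // with the same value or rename conflicting ones. +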
argsMap.forEach((v, k) => { + args.workflowArgsMap.set(k, v); + }); + + schemas.push(actionSchema); + } + return schemas; + } + + private isValidTemplateAction( + workflowArgsToFilter: WorkflowFunctionArgs, + ): boolean { + return ( + workflowArgsToFilter.url && + workflowArgsToFilter.values && + Object.keys(workflowArgsToFilter.values).length + ); + } + + private extractFilteredArgId(args: { + map: Map; + key: string; + value: string; + description: string; + }): string | undefined { + if (args.map.has(args.key)) { + if (args.map.get(args.key) === args.value) { + return undefined; + } + return this.sanitizeText({ + text: `${args.description} ${args.key}`, + placeholder: '_', + }); + } + return args.key; + } + + private extractWorkflowFunction(args: { + workflow: Specification.Workflow; + actionDescriptor: WorkflowActionDescriptor; + openApi: OpenAPIV3.Document; + }): WorkflowFunction | undefined { + const functionRef = args.actionDescriptor.action + .functionRef as Specification.Functionref; + + if (!functionRef.arguments) { + this.logger.info( + `No arguments found for function ${functionRef.refName}. Skipping...`, + ); + return undefined; + } + + const operationId = this.extractOperationIdByWorkflowFunctionName({ + workflow: args.workflow, + functionName: functionRef.refName, + }); + + if (!operationId) { + this.logger.info( + `No operation id found for function ${functionRef.refName}. Skipping...`, + ); + return undefined; + } + + const refSchema = this.extractSchemaByOperationId({ + openApi: args.openApi, + operationId, + }); + + if (!refSchema) { + this.logger.info( + `The schema associated with ${operationId} could not be found. Skipping...`, + ); + return undefined; + } + + return { + operationId, + ref: functionRef, + schema: refSchema, + }; + } + + private async extractSchemaFromState(args: { + workflow: Specification.Workflow; + openApi: OpenAPIV3.Document; + actionDescriptor: WorkflowActionDescriptor; + workflowArgsMap: Map; + isConditional: boolean; + }): Promise< + { actionSchema: JsonSchemaFile; argsMap: Map } | undefined + > { + const wfFunction = this.extractWorkflowFunction({ + workflow: args.workflow, + actionDescriptor: args.actionDescriptor, + openApi: args.openApi, + }); + if (!wfFunction) { + return undefined; + } + + const schemaPropsToFilter = this.resolveObject( + wfFunction.schema.properties, + ); + const workflowArgsToFilter = { + ...wfFunction.ref.arguments, + } as WorkflowFunctionArgs; + + if (wfFunction.operationId === FETCH_TEMPLATE_ACTION_OPERATION_ID) { + if (!this.isValidTemplateAction(workflowArgsToFilter)) { + return undefined; + } + + const template = await this.extractTemplateFromSkeletonUrl({ + url: workflowArgsToFilter.url, + isConditional: args.isConditional, + }); + + template?.values.forEach(v => { + schemaPropsToFilter[v] = { + title: v, + description: `Extracted from ${template.url}`, + type: 'string', + }; + schemaPropsToFilter[this.snakeCaseToCamelCase(v)] = { + title: this.snakeCaseToCamelCase(v), + description: `Extracted from ${template.url}`, + type: 'string', + }; + schemaPropsToFilter[this.camelCaseToSnakeCase(v)] = { + title: this.camelCaseToSnakeCase(v), + description: `Extracted from ${template.url}`, + type: 'string', + }; + }); + + Object.keys(workflowArgsToFilter.values).forEach(k => { + workflowArgsToFilter[k] = workflowArgsToFilter.values[k]; + }); + } + + if (wfFunction.schema.oneOf?.length) { + const oneOfSchema = ( + wfFunction.schema.oneOf as OpenAPIV3.SchemaObject[] + ).find(item => + 
Object.keys(workflowArgsToFilter).some(arg => + Object.keys(item.properties!).includes(arg), + ), + ); + if (!oneOfSchema?.properties) { + return undefined; + } + Object.entries(oneOfSchema.properties).forEach(([k, v]) => { + schemaPropsToFilter[k] = { + ...(v as OpenAPIV3.BaseSchemaObject), + }; + }); + } + + const requiredArgsToShow = + this.extractRequiredArgsToShow(workflowArgsToFilter); + if (!Object.keys(requiredArgsToShow).length) { + return undefined; + } + + const argsMap = new Map(args.workflowArgsMap); + const filteredProperties: OpenApiSchemaProperties = {}; + const filteredRequired: string[] = []; + for (const [argKey, argValue] of Object.entries(requiredArgsToShow)) { + if (!schemaPropsToFilter.hasOwnProperty(argKey)) { + continue; + } + const argId = this.extractFilteredArgId({ + map: argsMap, + key: argKey, + value: argValue, + description: args.actionDescriptor.descriptor, + }); + if (!argId) { + continue; + } + argsMap.set(argId, argValue); + + filteredProperties[argId] = { + ...schemaPropsToFilter[argKey], + }; + filteredRequired.push(argKey); + } + + const updatedSchema = { + properties: this.resolveObjectIfNotEmpty(filteredProperties), + required: this.resolveArrayIfNotEmpty(filteredRequired), + }; + + if (!updatedSchema.properties && !args.isConditional) { + return undefined; + } + + const actionSchema = this.buildJsonSchemaSkeleton({ + owner: args.actionDescriptor.owner, + workflowId: args.workflow.id, + title: args.actionDescriptor.descriptor, + filename: this.sanitizeText({ + text: args.actionDescriptor.descriptor, + placeholder: '_', + }), + }); + + actionSchema.jsonSchema = { + ...actionSchema.jsonSchema, + ...updatedSchema, + }; + + return { actionSchema, argsMap }; + } + + private extractRequiredArgsToShow( + argsToFilter: WorkflowFunctionArgs, + ): WorkflowFunctionArgs { + return Object.entries(argsToFilter).reduce((obj, [k, v]) => { + if ( + typeof v === 'string' && + Regex.NAIVE_ARG_IN_JQ.test(String(v.trim())) + ) { + obj[k] = v; + } + return obj; + }, {} as WorkflowFunctionArgs); + } + + private extractSchemaByOperationId(args: { + openApi: OpenAPIV3.Document; + operationId: string; + }): OpenAPIV3.SchemaObject | undefined { + const openApiOperation = this.extractOperationFromOpenApi({ + openApi: args.openApi, + operationId: args.operationId, + }); + if (!openApiOperation?.requestBody) { + this.logger.info( + `The operation associated with ${args.operationId} has no requestBody.`, + ); + return undefined; + } + + const requestBodyContent = ( + openApiOperation.requestBody as OpenAPIV3.RequestBodyObject + ).content; + if (!requestBodyContent) { + this.logger.info( + `The request body associated with ${args.operationId} has no content.`, + ); + return undefined; + } + + const bodyContent = Object.values(requestBodyContent).pop(); + if (!bodyContent?.schema) { + this.logger.info( + `The body content associated with ${args.operationId} has no schema.`, + ); + return undefined; + } + + const $ref = (bodyContent.schema as OpenAPIV3.ReferenceObject).$ref; + if (!$ref) { + this.logger.info( + `The schema associated with ${args.operationId} has no $ref.`, + ); + return undefined; + } + + const refParts = $ref.split('/'); + const refKey = refParts[refParts.length - 1]; + return args.openApi.components?.schemas?.[refKey] as OpenAPIV3.SchemaObject; + } + + private extractActionsFromState( + state: WorkflowState, + ): WorkflowActionDescriptor[] { + if (state.type === 'operation' || state.type === 'foreach') { + return this.extractActionsFromOperationState({ state }); + 
} else if (state.type === 'parallel') { + return this.extractActionsFromParallelState({ state }); + } else if (state.type === 'event') { + return this.extractActionsFromEventState({ state }); + } else if (state.type === 'callback') { + return this.extractActionsFromCallbackState({ state }); + } + return []; + } + + private buildActionDescriptor(args: { + actionName?: string; + stateName: string; + functionRefName?: string; + outerItem?: { name: string } & ( + | { kind: 'Unique' } + | { + kind: 'Array'; + array: Specification.Onevents[] | Specification.Branch[]; + idx: number; + } + ); + actions?: { + array: Specification.Action[]; + idx: number; + }; + }): string { + const separator = ' > '; + let descriptor = args.stateName; + if (args.outerItem) { + if (args.outerItem.kind === 'Unique') { + descriptor += `${separator}${args.outerItem.name}`; + } else if (args.outerItem.array.length > 1) { + descriptor += `${separator}${args.outerItem.name}-${ + args.outerItem.idx + 1 + }`; + } + } + if (args.actionName) { + descriptor += `${separator}${args.actionName}`; + } + if (args.functionRefName) { + descriptor += `${separator}${args.functionRefName}`; + } + if (!args.actionName && args.actions && args.actions.array.length > 1) { + descriptor += `${separator}${args.actions.idx + 1}`; + } + return descriptor; + } + + private extractActionsFromOperationState(args: { + state: Specification.Operationstate | Specification.Foreachstate; + functionRefName?: string; + }): WorkflowActionDescriptor[] { + if (!args.state.actions) { + return []; + } + return args.state.actions + .filter(action => { + if (!action.functionRef || typeof action.functionRef === 'string') { + return false; + } + if (!args.functionRefName) { + return true; + } + return action.functionRef.refName === args.functionRefName; + }) + .map((action, idx, arr) => { + const descriptor = this.buildActionDescriptor({ + actionName: action.name, + stateName: args.state.name!, + functionRefName: (action.functionRef as Specification.Functionref)! 
+ .refName, + actions: { + array: arr, + idx, + }, + }); + return { owner: args.state.name!, descriptor, action }; + }); + } + + private extractActionsFromParallelState(args: { + state: Specification.Parallelstate; + functionRefName?: string; + }): WorkflowActionDescriptor[] { + if (!args.state.branches) { + return []; + } + + return args.state.branches + .map(branch => + branch.actions + .filter(action => { + if (!action.functionRef || typeof action.functionRef === 'string') { + return false; + } + if (!args.functionRefName) { + return true; + } + return action.functionRef.refName === args.functionRefName; + }) + .map((action, idx, arr) => { + const descriptor = this.buildActionDescriptor({ + actionName: action.name, + outerItem: { + kind: 'Unique', + name: branch.name, + }, + stateName: args.state.name!, + functionRefName: + (action.functionRef as Specification.Functionref)!.refName, + actions: { + array: arr, + idx, + }, + }); + return { owner: args.state.name!, descriptor, action }; + }), + ) + .flat(); + } + + private extractActionsFromEventState(args: { + state: Specification.Eventstate; + functionRefName?: string; + }): WorkflowActionDescriptor[] { + if (!args.state.onEvents) { + return []; + } + + return args.state.onEvents + .map((onEvent, eIdx, eArr) => { + if (!onEvent.actions) { + return []; + } + return onEvent.actions + .filter(action => { + if (!action.functionRef || typeof action.functionRef === 'string') { + return false; + } + if (!args.functionRefName) { + return true; + } + return action.functionRef.refName === args.functionRefName; + }) + .map((action, aIdx, aArr) => { + const descriptor = this.buildActionDescriptor({ + actionName: action.name, + stateName: args.state.name, + functionRefName: + (action.functionRef as Specification.Functionref)!.refName, + actions: { + array: aArr, + idx: aIdx, + }, + outerItem: { + kind: 'Array', + name: 'onEvent', + array: eArr, + idx: eIdx, + }, + }); + return { owner: args.state.name, descriptor, action }; + }); + }) + .flat(); + } + + private extractActionsFromCallbackState(args: { + state: Specification.Callbackstate; + functionRefName?: string; + }): WorkflowActionDescriptor[] { + if ( + !args.state.action?.functionRef || + typeof args.state.action.functionRef === 'string' || + (args.functionRefName && + args.state.action.functionRef.refName !== args.functionRefName) + ) { + return []; + } + + const descriptor = this.buildActionDescriptor({ + actionName: args.state.action.name, + stateName: args.state.name!, + functionRefName: args.state.action.functionRef.refName, + }); + + return [{ owner: args.state.name!, descriptor, action: args.state.action }]; + } + + private snakeCaseToCamelCase(input: string): string { + return input.replace(Regex.SNAKE_CASE, (_, letter) => letter.toUpperCase()); + } + + private camelCaseToSnakeCase(input: string): string { + return input.replace( + Regex.CAMEL_CASE, + (_, letter) => `_${letter.toLowerCase()}`, + ); + } + + private sanitizeText(args: { text: string; placeholder: string }): string { + const parts = args.text.trim().split(Regex.NON_ALPHA_NUMERIC); + return parts.join(args.placeholder); + } + + private buildJsonSchemaSkeleton(args: { + workflowId: string; + title: string; + owner?: string; + filename?: string; + }): JsonSchemaFile { + const fullFileName = args.owner + ? `${args.workflowId}__sub_schema__${args.filename}.json` + : `${args.workflowId}__main_schema.json`; + return { + owner: args.owner ?? 
'Workflow', + fileName: fullFileName, + jsonSchema: { + title: `${args.workflowId}: ${args.title}`, + $schema: JSON_SCHEMA_VERSION, + type: 'object', + }, + }; + } + + private extractOperationIdFromWorkflowFunction( + workflowFunction: Specification.Function, + ): string { + return workflowFunction.operation.split('#')[1]; + } + + private extractOperationIdByWorkflowFunctionName(args: { + workflow: Specification.Workflow; + functionName: string; + }): string | undefined { + if (!Array.isArray(args.workflow.functions)) { + return undefined; + } + + const workflowFunction = args.workflow.functions.find( + f => f.name === args.functionName, + ); + + if (!workflowFunction) { + return undefined; + } + + return this.extractOperationIdFromWorkflowFunction(workflowFunction); + } + + private extractOperationFromOpenApi(args: { + openApi: OpenAPIV3.Document; + operationId: string; + }): OpenAPIV3.OperationObject | undefined { + return Object.values(args.openApi.paths) + .flatMap( + methods => + methods && + Object.values(methods).filter( + method => + method && + (method as OpenAPIV3.OperationObject).operationId === + args.operationId, + ), + ) + .pop() as OpenAPIV3.OperationObject | undefined; + } + + private removeTrailingSlash(path: string): string { + if (path.endsWith('/')) { + return path.slice(0, -1); + } + return path; + } + + private removeSurroundingQuotes(inputString: string): string { + const trimmed = inputString.trim(); + if ( + (trimmed.startsWith("'") && trimmed.endsWith("'")) || + (trimmed.startsWith('"') && trimmed.endsWith('"')) + ) { + return trimmed.slice(1, -1); + } + return trimmed; + } + + private convertToGitHubApiUrl(githubUrl: string): GitHubPath | undefined { + const sanitizedUrl = this.removeSurroundingQuotes(githubUrl); + const githubApiMatch = RegExp(Regex.GITHUB_API_URL).exec(sanitizedUrl); + if (githubApiMatch) { + const [, owner, repo, ref, path] = githubApiMatch; + return { + owner, + repo, + ref, + path: this.removeTrailingSlash(path), + }; + } + + const githubUrlMatch = RegExp(Regex.GITHUB_URL).exec(sanitizedUrl); + if (!githubUrlMatch) { + return undefined; + } + + const [, owner, repo, ref, path] = githubUrlMatch; + return { + owner, + repo, + ref, + path: this.removeTrailingSlash(path), + }; + } + + private async fetchGitHubRepoPaths(repoInfo: GitHubPath): Promise { + const response = await this.octokit.request( + 'GET /repos/:owner/:repo/git/trees/:ref', + { + owner: repoInfo.owner, + repo: repoInfo.repo, + ref: repoInfo.ref, + recursive: 1, + }, + ); + return response.data.tree + .filter((item: GitTreeItem) => item.type === 'blob') + .map((item: GitTreeItem) => item.path) + .filter((path: string) => path.startsWith(`${repoInfo.path}/`)); + } + + private async extractTemplateValuesFromSkeletonUrl( + githubPath: GitHubPath, + ): Promise { + try { + const filePaths = await this.fetchGitHubRepoPaths(githubPath); + const fileMatchPromises: Promise[] = []; + + filePaths.forEach(p => { + fileMatchPromises.push( + this.extractTemplateValuesFromGitHubFile({ + ...githubPath, + path: p, + }), + ); + }); + + const fileMatches = (await Promise.all(fileMatchPromises)) + .flat() + .filter((r): r is string => r !== undefined); + + return Array.from(new Set(fileMatches)); + } catch (e) { + this.logger.error(e); + } + return []; + } + + private async extractTemplateValuesFromGitHubFile( + githubPath: GitHubPath, + ): Promise { + const valueMatchesInPath = githubPath.path.matchAll( + Regex.VALUES_IN_SKELETON, + ); + const conditionMatchesInPath = githubPath.path.matchAll( + 
Regex.CONDITION_IN_SKELETON, + ); + const valuesInPath = Array.from(valueMatchesInPath, match => match[1]); + const conditionsInPath = Array.from( + conditionMatchesInPath, + match => match[1], + ); + + try { + const content = await this.octokit.repos.getContent({ ...githubPath }); + if (!content) { + return []; + } + const fileData = content.data as FileData; + const fileContent = this.decoder.decode( + new Uint8Array(Buffer.from(fileData.content, fileData.encoding)), + ); + const valueMatchesInContent = fileContent.matchAll( + Regex.VALUES_IN_SKELETON, + ); + const conditionMatchesInContent = fileContent.matchAll( + Regex.CONDITION_IN_SKELETON, + ); + const valuesInContent = Array.from( + valueMatchesInContent, + match => match[1] || match[2], + ); + const conditionsInContent = Array.from( + conditionMatchesInContent, + match => match[1], + ); + + return [ + ...valuesInPath, + ...conditionsInPath, + ...valuesInContent, + ...conditionsInContent, + ]; + } catch (e) { + this.logger.error(e); + } + return []; + } + + private extractVariablesFromWorkflow( + workflow: Specification.Workflow, + ): Set { + const blockList = [ + '.actionDataFilter', + '.stateDataFilter', + '.eventDataFilter', + ]; + const inputVariableSet = new Set(); + const workflowVariableSet = new Set(); + + function traverseValue(value: any, currentPath: string) { + if (typeof value === 'string') { + handleValue(value, currentPath); + } else if (Array.isArray(value)) { + value.forEach((item, index) => { + traverseValue(item, `${currentPath}[${index}]`); + }); + } else if (typeof value === 'object') { + traverseObject(value, currentPath); + } + } + + function handleValue(value: string, currentPath: string) { + const tokens = value.split(/\s|\${|}/); + let inTemplate = false; + + tokens.forEach(token => { + if (inTemplate) { + if (token.endsWith('}')) { + inTemplate = false; + } + return; + } + + if (token.startsWith('.')) { + const variable = token.slice(1).replace(/[=!<>]{0,50}$/, ''); + if (variable && !variable.includes('.')) { + addVariable({ variable, currentPath }); + } + } + + if (token.startsWith('${')) { + inTemplate = true; + } + }); + } + + function addVariable(args: { + variable: string | undefined; + currentPath: string; + }): void { + if (!args.variable) { + return; + } + if (blockList.some(b => args.currentPath.includes(b))) { + workflowVariableSet.add(args.variable); + } else { + inputVariableSet.add(args.variable); + } + } + + function traverseObject(currentObj: any, currentPath: string) { + for (const key in currentObj) { + if (currentObj.hasOwnProperty(key)) { + const value = currentObj[key]; + const newPath = currentPath ? 
`${currentPath}.${key}` : key; + + traverseValue(value, newPath); + } + } + } + + traverseObject(workflow, ''); + + workflow.states.forEach(state => { + if (state.type === 'inject' && state.data) { + Object.keys(state.data).forEach(k => inputVariableSet.delete(k)); + } + }); + + workflowVariableSet.forEach(v => inputVariableSet.delete(v)); + + return inputVariableSet; + } + + public parseComposition(inputSchema: JSONSchema7): JSONSchema7[] { + if (!inputSchema.properties) { + return []; + } + + const refPaths = Object.values(inputSchema.properties) + .map(p => (p as JSONSchema7).$ref) + .filter((r): r is string => r !== undefined); + + if (!refPaths.length) { + return [inputSchema]; + } + + return refPaths + .map(r => this.findReferencedSchema({ rootSchema: inputSchema, ref: r })) + .filter((r): r is JSONSchema7 => r !== undefined); + } + + private findReferencedSchema(args: { + rootSchema: JSONSchema7; + ref: string; + }): JSONSchema7 | undefined { + const pathParts = args.ref + .split('/') + .filter(part => !['#', ''].includes(part)); + + let current: any = args.rootSchema; + for (const part of pathParts) { + current = current?.[part]; + if (current === undefined) { + return undefined; + } + } + + if (!current.properties) { + return undefined; + } + + return current; + } +} diff --git a/plugins/orchestrator-backend/src/service/GitService.ts b/plugins/orchestrator-backend/src/service/GitService.ts new file mode 100644 index 0000000000..600fc24073 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/GitService.ts @@ -0,0 +1,93 @@ +import { Git } from '@backstage/backend-common'; +import { Config } from '@backstage/config'; +import { ScmIntegrations } from '@backstage/integration'; + +import { Logger } from 'winston'; + +export class GitService { + private readonly git: Git; + + private readonly logger: Logger; + private authenticated: boolean; + + private readonly author = { + name: 'backstage-orchestrator', + email: 'orchestrator@backstage.io', + }; + + private readonly committer = { + name: 'backstage-orchestrator', + email: 'orchestrator@backstage.io', + }; + + constructor(logger: Logger, config: Config) { + this.logger = logger; + const githubIntegration = ScmIntegrations.fromConfig(config) + .github.list() + .pop(); + this.git = Git.fromAuth({ + username: 'x-access-token', + password: githubIntegration?.config.token, + }); + this.authenticated = !!githubIntegration?.config.token; + } + + async clone(repoURL: string, localPath: string): Promise<void> { + this.logger.info(`cloning repo ${repoURL} into ${localPath}`); + return this.git + .clone({ + url: repoURL, + dir: localPath, + depth: 1, + }) + .then(() => this.git.checkout({ dir: localPath, ref: 'main' })); + } + + async push(dir: string, message: string): Promise<void> { + if (!this.authenticated) { + this.logger.warn( + 'A Git integration with a token or credentials must be configured to push changes', + ); + return; + } + const branch = 'main'; + const force = true; + const remote = 'origin'; + const filepath = '.'; + this.git + .fetch({ remote, dir }) + .then(() => this.git.checkout({ dir, ref: branch })) + .then(() => this.git.add({ dir, filepath })) + .then(() => + this.git.commit({ + dir, + message, + author: this.author, + committer: this.committer, + }), + ) + .then(() => this.git.push({ dir, remote, remoteRef: branch, force })) + .finally(() => this.logger.info('push completed')) + .catch(ex => this.logger.error(ex)); + } + + async pull(localPath: string): Promise<void> { + const remoteBranch = 'origin/main'; + const localBranch
= 'main'; + const remote = 'origin'; + this.git + .fetch({ remote, dir: localPath }) + .then(() => this.git.checkout({ dir: localPath, ref: localBranch })) + .then(() => + this.git.merge({ + dir: localPath, + ours: localBranch, + theirs: remoteBranch, + author: this.author, + committer: this.committer, + }), + ) + .finally(() => this.logger.info('merge completed')) + .catch(ex => this.logger.error(ex)); + } +} diff --git a/plugins/orchestrator-backend/src/service/Helper.ts b/plugins/orchestrator-backend/src/service/Helper.ts new file mode 100644 index 0000000000..2134d87f77 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/Helper.ts @@ -0,0 +1,60 @@ +import { Config } from '@backstage/config'; + +import fs from 'fs-extra'; +import { Logger } from 'winston'; + +import os from 'os'; + +export async function getWorkingDirectory( + config: Config, + logger: Logger, +): Promise<string> { + if (!config.has('backend.workingDirectory')) { + return os.tmpdir(); + } + + const workingDirectory = config.getString('backend.workingDirectory'); + try { + // Check if working directory exists and is writable + await fs.access(workingDirectory, fs.constants.F_OK | fs.constants.W_OK); + logger.info(`using working directory: ${workingDirectory}`); + } catch (err: any) { + logger.error( + `working directory ${workingDirectory} ${ + err.code === 'ENOENT' ? 'does not exist' : 'is not writable' + }`, + ); + throw err; + } + return workingDirectory; +} + +export async function executeWithRetry( + action: () => Promise<Response>, + maxErrors = 15, +): Promise<Response> { + let response: Response; + let errorCount = 0; + // execute with retry + const backoff = 5000; + while (errorCount < maxErrors) { + try { + response = await action(); + if (response.status >= 400) { + errorCount++; + // backoff + await delay(backoff); + } else { + return response; + } + } catch (e) { + errorCount++; + await delay(backoff); + } + } + throw new Error('Unable to execute query.'); +} + +export function delay(time: number) { + return new Promise(r => setTimeout(r, time)); +} diff --git a/plugins/orchestrator-backend/src/service/JiraService.ts b/plugins/orchestrator-backend/src/service/JiraService.ts new file mode 100644 index 0000000000..8e5ed1f2e2 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/JiraService.ts @@ -0,0 +1,78 @@ +import { CloudEvent } from 'cloudevents'; +import { Logger } from 'winston'; + +import { CloudEventService } from './CloudEventService'; + +export interface BaseIssueEvent { + webhookEvent: 'jira:issue_updated'; + issue: { + id: string; + key: string; + fields: { + labels: string[]; + }; + }; +} + +export interface IssueCommented extends BaseIssueEvent { + issue_event_type_name: 'issue_commented'; + comment: { + body: string; + }; +} + +export interface IssueUpdated extends BaseIssueEvent { + issue_event_type_name: 'issue_generic' | 'issue_resolved'; + changelog: { + items: { + field: string; + fromString: string; + toString: string; + }[]; + }; +} + +export type JiraEvent = IssueCommented | IssueUpdated; + +export class JiraService { + constructor( + private readonly logger: Logger, + private readonly cloudEventService: CloudEventService, + ) {} + + public async handleEvent(jiraEvent: JiraEvent | undefined): Promise<void> { + if (!jiraEvent) { + this.logger.warn('Received empty event'); + return; + } + + if (jiraEvent.issue_event_type_name === 'issue_resolved') { + const newStatus = jiraEvent.changelog.items.find( + item => item.field === 'status', + )?.toString; + const label = jiraEvent.issue.fields.labels.find(l =>
l.includes('workflowId'), + ); + if (!label) { + this.logger.warn('Received event without JIRA label'); + return; + } + + const workflowInstanceId = label.slice(label.indexOf('=') + 1); + if (newStatus === 'Done' || newStatus === 'Resolved') { + const response = await this.cloudEventService.send({ + event: new CloudEvent({ + type: 'jira_webhook_callback', // same defined in the workflow + source: 'jira', + kogitoprocrefid: workflowInstanceId, // correlation + data: jiraEvent, + }), + }); + + if (!response.success) { + this.logger.error(`Failed to send cloud event: ${response.error}`); + } + } + } + } +} diff --git a/plugins/orchestrator-backend/src/service/OpenApiService.ts b/plugins/orchestrator-backend/src/service/OpenApiService.ts new file mode 100644 index 0000000000..067aad1ee0 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/OpenApiService.ts @@ -0,0 +1,134 @@ +import { DiscoveryApi } from '@backstage/core-plugin-api'; + +import { Logger } from 'winston'; + +import openApiTemplate from './openapi-template.json'; + +export class OpenApiService { + logger: Logger; + discovery: DiscoveryApi; + + constructor(logger: Logger, discovery: DiscoveryApi) { + this.logger = logger; + this.discovery = discovery; + } + + private async fetchScaffolderActions(): Promise { + const scaffolderUrl = await this.discovery.getBaseUrl('scaffolder'); + return fetch(`${scaffolderUrl}/v2/actions`).then(res => { + return res.json(); + }); + } + + async generateOpenApi(): Promise { + const template = { ...openApiTemplate }; + return this.fetchScaffolderActions() + .then(actions => { + template.paths = this.mapPaths(actions); + return actions; + }) + .then(actions => this.mapSchemas(actions)) + .then(schemas => { + template.components.schemas = schemas; + return template; + }) + .catch(err => { + this.logger.error(err); + }); + } + + private mapPaths(actions: any): any { + const paths: any = {}; + for (const action of actions) { + const actionId: string = action.id; + const description = action.description; + const schemaName = this.generateSchemaName(actionId); + + const path = `/actions/${actionId}`; + paths[path] = { + post: { + operationId: actionId, + description: description, + requestBody: { + description: `Input parameters for the action ${actionId} in BS`, + required: true, + content: { + 'application/json': { + schema: { + $ref: `#/components/schemas/${schemaName}`, + }, + }, + }, + }, + responses: { + default: { + description: `Action ${actionId} response`, + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }; + } + return paths; + } + + private deleteInvalidPropertiesDfs(object: any): void { + if ( + !object || + Object.prototype.toString.call(object) !== '[object Object]' + ) { + return; + } + Object.entries(object).forEach(([k, v]) => { + const invalidConstProperty = k === 'const' && v === '*'; + const invalidTypeArray = k === 'type' && v === 'array' && !object.items; + if (invalidConstProperty) { + delete object[k]; + } else if (invalidTypeArray) { + // invalid array type that does not contain items property fallback to string + object[k] = 'string'; + } else { + this.deleteInvalidPropertiesDfs(object[k]); + } + }); + } + + private mapSchemas(actions: any): any { + const schemas: any = {}; + + for (const action of actions) { + const actionId: string = action.id; + const schema = action.schema; + const input = schema.input; + const schemaName = this.generateSchemaName(actionId); + + // removing invalid attribute + delete input.$schema; + + 
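// Recursively drop "const": "*" entries and fall back to string for array + // types that lack an "items" schema (see deleteInvalidPropertiesDfs above). +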
this.deleteInvalidPropertiesDfs(input); + + if (input.properties) { + Object.keys(input.properties).forEach(key => { + const prop = input.properties[key]; + if (prop && Array.isArray(prop.type)) { + // type: [string, boolean] is invalid + prop.type = prop.type.pop(); + } + }); + } + + schemas[schemaName] = input; + } + return schemas; + } + + private generateSchemaName(actionId: string): string { + return actionId?.replaceAll(':', '_') ?? actionId; + } +} diff --git a/plugins/orchestrator-backend/src/service/ScaffolderService.ts b/plugins/orchestrator-backend/src/service/ScaffolderService.ts new file mode 100644 index 0000000000..30f664d4e9 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/ScaffolderService.ts @@ -0,0 +1,103 @@ +import { UrlReader } from '@backstage/backend-common'; +import { CatalogApi } from '@backstage/catalog-client'; +import { Config } from '@backstage/config'; +import { ScmIntegrations } from '@backstage/integration'; +import { + createBuiltinActions, + TemplateActionRegistry, +} from '@backstage/plugin-scaffolder-backend'; +import { + ActionContext, + TemplateAction, +} from '@backstage/plugin-scaffolder-node'; +import { JsonObject, JsonValue } from '@backstage/types'; + +import fs from 'fs-extra'; +import { Logger } from 'winston'; + +import { randomUUID } from 'crypto'; +import path from 'path'; +import { PassThrough } from 'stream'; + +import { getWorkingDirectory } from './Helper'; + +export interface ActionExecutionContext { + actionId: string; + instanceId: string | undefined; + input: JsonObject; +} + +export class ScaffolderService { + private actionRegistry: TemplateActionRegistry; + private streamLogger = new PassThrough(); + + constructor( + private readonly logger: Logger, + private readonly config: Config, + private readonly catalogApi: CatalogApi, + private readonly urlReader: UrlReader, + ) { + this.actionRegistry = new TemplateActionRegistry(); + } + + public loadActions(): void { + const actions = [ + ...createBuiltinActions({ + integrations: ScmIntegrations.fromConfig(this.config), + catalogClient: this.catalogApi, + reader: this.urlReader, + config: this.config, + }), + ]; + actions.forEach(a => this.actionRegistry.register(a)); + } + + public getAction(id: string): TemplateAction { + return this.actionRegistry.get(id); + } + + public async executeAction( + actionExecutionContext: ActionExecutionContext, + ): Promise { + if (this.actionRegistry.list().length === 0) { + this.loadActions(); + } + + const action: TemplateAction = this.getAction( + actionExecutionContext.actionId, + ); + const stepOutput: { [outputName: string]: JsonValue } = {}; + + let workspacePath: string; + try { + const workingDirectory = await getWorkingDirectory( + this.config, + this.logger, + ); + workspacePath = path.join( + workingDirectory, + actionExecutionContext.instanceId ?? 
randomUUID(), + ); + } catch (err) { + this.logger.error( + `Error getting working directory to execute action ${actionExecutionContext.actionId}`, + err, + ); + throw err; + } + const mockContext: ActionContext = { + input: actionExecutionContext.input, + workspacePath: workspacePath, + logger: this.logger, + logStream: this.streamLogger, + createTemporaryDirectory: async () => + await fs.mkdtemp(`${workspacePath}_step-${0}-`), + output(name: string, value: JsonValue) { + stepOutput[name] = value; + }, + }; + await action.handler(mockContext); + + return stepOutput; + } +} diff --git a/plugins/orchestrator-backend/src/service/SonataFlowService.ts b/plugins/orchestrator-backend/src/service/SonataFlowService.ts new file mode 100644 index 0000000000..6d991a023f --- /dev/null +++ b/plugins/orchestrator-backend/src/service/SonataFlowService.ts @@ -0,0 +1,456 @@ +import { Config } from '@backstage/config'; + +import { OpenAPIV3 } from 'openapi-types'; +import { Logger } from 'winston'; + +import { + DEFAULT_SONATAFLOW_BASE_URL, + DEFAULT_SONATAFLOW_CONTAINER_IMAGE, + DEFAULT_SONATAFLOW_PERSISTANCE_PATH, + DEFAULT_WORKFLOWS_PATH, + fromWorkflowSource, + getWorkflowCategory, + ProcessInstance, + ProcessInstanceStateValues, + WorkflowDefinition, + WorkflowExecutionResponse, + WorkflowInfo, + WorkflowItem, + WorkflowOverview, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { spawn } from 'child_process'; +import { join, resolve } from 'path'; + +import { DataIndexService } from './DataIndexService'; +import { executeWithRetry } from './Helper'; + +const SONATA_FLOW_RESOURCES_PATH = + '/home/kogito/serverless-workflow-project/src/main/resources'; + +interface SonataFlowSource { + uri: string; +} + +interface LauncherCommand { + command: string; + args: string[]; +} + +interface SonataFlowConnectionConfig { + host: string; + port?: number; + containerImage: string; + resourcesPath: string; + persistencePath: string; + autoStart: boolean; + jira?: JiraConfig; +} + +interface JiraConfig { + host: string; + bearerToken: string; +} + +export class SonataFlowService { + private readonly connection: SonataFlowConnectionConfig; + private dataIndex: DataIndexService; + + constructor( + config: Config, + dataIndexService: DataIndexService, + private readonly logger: Logger, + ) { + this.connection = this.extractConnectionConfig(config); + this.dataIndex = dataIndexService; + } + + public get autoStart(): boolean { + return this.connection.autoStart; + } + + public get resourcesPath(): string { + return this.connection.resourcesPath; + } + + public async connect(): Promise<boolean> { + if (!this.connection.autoStart) { + return true; + } + + const isAlreadyUp = await this.isSonataFlowUp(false, this.devmodeUrl); + if (isAlreadyUp) { + return true; + } + + this.launchSonataFlow(); + return await this.isSonataFlowUp(true, this.devmodeUrl); + } + + public get devmodeUrl(): string { + if (!this.connection.port) { + return this.connection.host; + } + return `${this.connection.host}:${this.connection.port}`; + } + + public async fetchWorkflowUri( + workflowId: string, + ): Promise<string | undefined> { + try { + const endpoint = + (await this.dataIndex.getWorkflowDefinition(workflowId)).serviceUrl ??
+ ''; + const urlToFetch = `${endpoint}/management/processes/${workflowId}/sources`; + const response = await executeWithRetry(() => fetch(urlToFetch)); + + if (response.ok) { + const json = (await response.json()) as SonataFlowSource[]; + // Assuming only one source in the list + return json.pop()?.uri; + } + const responseStr = JSON.stringify(response); + this.logger.error( + `Response was NOT okay when fetch(${urlToFetch}). Received response: ${responseStr}`, + ); + } catch (error) { + this.logger.error(`Error when fetching workflow uri: ${error}`); + } + return undefined; + } + + public async fetchWorkflowInfo( + workflowId: string, + endpoint: string, + ): Promise<WorkflowInfo | undefined> { + try { + const urlToFetch = `${endpoint}/management/processes/${workflowId}`; + const response = await executeWithRetry(() => fetch(urlToFetch)); + + if (response.ok) { + return await response.json(); + } + const responseStr = JSON.stringify(response); + this.logger.error( + `Response was NOT okay when fetch(${urlToFetch}). Received response: ${responseStr}`, + ); + } catch (error) { + this.logger.error(`Error when fetching workflow info: ${error}`); + } + + return undefined; + } + + public async fetchWorkflowDefinition( + workflowId: string, + ): Promise<WorkflowDefinition | undefined> { + try { + const source = await this.dataIndex.fetchWorkflowSource(workflowId); + if (source) { + return fromWorkflowSource(source); + } + } catch (error) { + this.logger.error(`Error when fetching workflow definition: ${error}`); + } + return undefined; + } + + public async fetchOpenApi( + endpoint: string, + ): Promise<OpenAPIV3.Document | undefined> { + try { + const urlToFetch = `${endpoint}/q/openapi.json`; + const response = await executeWithRetry(() => fetch(urlToFetch)); + if (response.ok) { + return await response.json(); + } + const responseStr = JSON.stringify(response); + this.logger.error( + `Response was NOT okay when fetch(${urlToFetch}). Received response: ${responseStr}`, + ); + } catch (error) { + this.logger.error(`Error when fetching openapi: ${error}`); + } + return undefined; + } + + public async fetchWorkflows( + endpoint: string, + ): Promise<WorkflowItem[] | undefined> { + try { + const urlToFetch = `${endpoint}/management/processes`; + const response = await executeWithRetry(() => fetch(urlToFetch)); + + if (response.ok) { + const workflowIds = (await response.json()) as string[]; + if (!workflowIds?.length) { + return []; + } + const items = await Promise.all( + workflowIds.map(async (workflowId: string) => { + const definition = await this.fetchWorkflowDefinition(workflowId); + if (!definition) { + return undefined; + } + const uri = await this.fetchWorkflowUri(workflowId); + if (!uri) { + return undefined; + } + return { + uri, + definition: { + ...definition, + description: definition.description ?? definition.name, + }, + } as WorkflowItem; + }), + ); + return items.filter((item): item is WorkflowItem => !!item); + } + const responseStr = JSON.stringify(response); + this.logger.error( + `Response was NOT okay when fetch(${urlToFetch}).
Received response: ${responseStr}`, + ); + } catch (error) { + this.logger.error(`Error when fetching workflows: ${error}`); + } + return undefined; + } + + public async fetchWorkflowOverviews(): Promise< + WorkflowOverview[] | undefined + > { + try { + const workflowDefinitions = await this.dataIndex.getWorkflowDefinitions(); + if (!workflowDefinitions?.length) { + return []; + } + const items = await Promise.all( + workflowDefinitions + .filter(def => def.id) + .map(async (def: WorkflowInfo) => this.fetchWorkflowOverview(def.id)), + ); + return items.filter((item): item is WorkflowOverview => !!item); + } catch (error) { + this.logger.error( + `Error when fetching workflows for workflowOverview: ${error}`, + ); + } + return undefined; + } + + public async executeWorkflow(args: { + workflowId: string; + endpoint: string; + inputData: Record; + }): Promise { + try { + const workflowEndpoint = args.inputData?.businessKey + ? `${args.endpoint}/${args.workflowId}?businessKey=${args.inputData.businessKey}` + : `${args.endpoint}/${args.workflowId}`; + const response = await fetch(workflowEndpoint, { + method: 'POST', + body: JSON.stringify(args.inputData), + headers: { 'content-type': 'application/json' }, + }); + return response.json(); + } catch (error) { + this.logger.error(`Error when executing workflow: ${error}`); + } + return undefined; + } + + private launchSonataFlow(): void { + const launcherCmd = this.createLauncherCommand(); + + this.logger.info( + `Auto starting SonataFlow through: ${ + launcherCmd.command + } ${launcherCmd.args.join(' ')}`, + ); + + const process = spawn(launcherCmd.command, launcherCmd.args, { + shell: false, + }); + + process.on('close', code => { + this.logger.info(`SonataFlow process exited with code ${code}`); + }); + + process.on('exit', code => { + this.logger.info(`SonataFlow process exited with code ${code}`); + }); + + process.on('error', error => { + this.logger.error(`SonataFlow process error: ${error}`); + }); + } + + private async isSonataFlowUp( + withRetry: boolean, + endpoint: string, + ): Promise { + const healthUrl = `${endpoint}/q/health`; + this.logger.info(`Checking SonataFlow health at: ${healthUrl}`); + + try { + const response = await executeWithRetry( + () => fetch(healthUrl), + withRetry ? 
15 : 1, + ); + if (response.ok) { + this.logger.info('SonataFlow is up and running'); + return true; + } + } catch (e) { + this.logger.error(`Error when checking SonataFlow health: ${e}`); + } + return false; + } + private createLauncherCommand(): LauncherCommand { + const resourcesAbsPath = resolve( + join(this.connection.resourcesPath, DEFAULT_WORKFLOWS_PATH), + ); + + const launcherArgs = [ + 'run', + '--add-host', + 'host.docker.internal:host-gateway', + ]; + + if (this.connection.jira) { + launcherArgs.push(`--add-host`, `jira.test:${this.connection.jira.host}`); + } + + launcherArgs.push('--rm'); + launcherArgs.push('-e', `QUARKUS_HTTP_PORT=${this.connection.port}`); + + launcherArgs.push('-p', `${this.connection.port}:${this.connection.port}`); + launcherArgs.push('-e', `KOGITO_SERVICE_URL=${this.devmodeUrl}`); + launcherArgs.push( + '-v', + `${resourcesAbsPath}:${SONATA_FLOW_RESOURCES_PATH}`, + ); + launcherArgs.push('-e', 'KOGITO.CODEGEN.PROCESS.FAILONERROR=false'); + launcherArgs.push( + '-e', + `QUARKUS_EMBEDDED_POSTGRESQL_DATA_DIR=${this.connection.persistencePath}`, + ); + + if (this.connection.jira) { + launcherArgs.push( + '-e', + 'QUARKUS_REST_CLIENT_JIRA_OPENAPI_JSON_URL=http://jira.test:8080 -e ', + ); + launcherArgs.push(`JIRABEARERTOKEN=${this.connection.jira.bearerToken}`); + } + + launcherArgs.push(this.connection.containerImage); + + return { + command: 'docker', + args: launcherArgs, + }; + } + + private extractConnectionConfig(config: Config): SonataFlowConnectionConfig { + const autoStart = + config.getOptionalBoolean('orchestrator.sonataFlowService.autoStart') ?? + false; + + const host = + config.getOptionalString('orchestrator.sonataFlowService.baseUrl') ?? + DEFAULT_SONATAFLOW_BASE_URL; + const port = config.getOptionalNumber( + 'orchestrator.sonataFlowService.port', + ); + + const resourcesPath = + config.getOptionalString( + 'orchestrator.sonataFlowService.workflowsSource.localPath', + ) ?? ''; + + const containerImage = + config.getOptionalString('orchestrator.sonataFlowService.container') ?? + DEFAULT_SONATAFLOW_CONTAINER_IMAGE; + + const persistencePath = + config.getOptionalString( + 'orchestrator.sonataFlowService.persistence.path', + ) ?? DEFAULT_SONATAFLOW_PERSISTANCE_PATH; + + const jiraHost = config.getOptionalString('orchestrator.jira.host'); + const jiraBearerToken = config.getOptionalString( + 'orchestrator.jira.bearerToken', + ); + + const jiraConfig: JiraConfig | undefined = + jiraHost && jiraBearerToken + ? 
{ + host: jiraHost, + bearerToken: jiraBearerToken, + } + : undefined; + + return { + autoStart, + host, + port, + containerImage, + resourcesPath, + persistencePath, + jira: jiraConfig, + }; + } + + public async fetchWorkflowOverview( + workflowId: string, + ): Promise { + const definition = await this.fetchWorkflowDefinition(workflowId); + if (!definition) { + this.logger.debug(`Workflow definition not found: ${workflowId}`); + return undefined; + } + let processInstances: ProcessInstance[] = []; + const limit = 10; + let offset: number = 0; + + let lastTriggered: Date = new Date(0); + let lastRunStatus: ProcessInstanceStateValues | undefined; + let counter = 0; + let totalDuration = 0; + + do { + processInstances = await this.dataIndex.fetchWorkflowInstances( + definition.id, + limit, + offset, + ); + + for (const pInstance of processInstances) { + if (new Date(pInstance.start) > lastTriggered) { + lastTriggered = new Date(pInstance.start); + lastRunStatus = pInstance.state; + } + if (pInstance.start && pInstance.end) { + const start: Date = new Date(pInstance.start); + const end: Date = new Date(pInstance.end); + totalDuration += end.valueOf() - start.valueOf(); + counter++; + } + } + offset += limit; + } while (processInstances.length > 0); + + return { + workflowId: definition.id, + name: definition.name, + uri: await this.fetchWorkflowUri(workflowId), + lastTriggeredMs: lastTriggered.getTime(), + lastRunStatus, + category: getWorkflowCategory(definition), + avgDurationMs: counter ? totalDuration / counter : undefined, + description: definition.description, + }; + } +} diff --git a/plugins/orchestrator-backend/src/service/WorkflowService.ts b/plugins/orchestrator-backend/src/service/WorkflowService.ts new file mode 100644 index 0000000000..d0b2e79b2e --- /dev/null +++ b/plugins/orchestrator-backend/src/service/WorkflowService.ts @@ -0,0 +1,224 @@ +import { Config } from '@backstage/config'; + +import fs from 'fs-extra'; +import { Logger } from 'winston'; + +import { + ACTIONS_OPEN_API_FILE_PATH, + DEFAULT_WORKFLOWS_PATH, + extractWorkflowFormatFromUri, + fromWorkflowSource, + SCHEMAS_FOLDER, + SPEC_FILES, + toWorkflowString, + WorkflowItem, + WorkflowSpecFile, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { extname, join, resolve } from 'path'; + +import { DataInputSchemaService } from './DataInputSchemaService'; +import { GitService } from './GitService'; +import { OpenApiService } from './OpenApiService'; +import { SonataFlowService } from './SonataFlowService'; + +export class WorkflowService { + private readonly openApiService: OpenApiService; + private readonly dataInputSchemaService: DataInputSchemaService; + private readonly sonataFlowService: SonataFlowService; + private readonly logger: Logger; + private readonly githubService: GitService; + private readonly repoURL: string; + private readonly autoPush: boolean; + constructor( + openApiService: OpenApiService, + dataInputSchemaService: DataInputSchemaService, + sonataFlowService: SonataFlowService, + config: Config, + logger: Logger, + ) { + this.openApiService = openApiService; + this.dataInputSchemaService = dataInputSchemaService; + this.sonataFlowService = sonataFlowService; + this.logger = logger; + this.githubService = new GitService(logger, config); + this.repoURL = + config.getOptionalString( + 'orchestrator.sonataFlowService.workflowsSource.gitRepositoryUrl', + ) ?? ''; + this.autoPush = + config.getOptionalBoolean( + 'orchestrator.sonataFlowService.workflowsSource.autoPush', + ) ?? 
false; + } + + async saveWorkflowDefinition(item: WorkflowItem): Promise<WorkflowItem> { + const workflowFormat = extractWorkflowFormatFromUri(item.uri); + const definitionsPath = this.resolveResourcePath( + `${item.definition?.id}.sw.${workflowFormat}`, + ); + const dataInputSchemaPath = await this.saveDataInputSchema(item); + if (dataInputSchemaPath && item.definition) { + item.definition.dataInputSchema = dataInputSchemaPath; + } + + await this.saveFile(definitionsPath, item.definition); + + if (this.autoPush) { + await this.githubService.push( + this.sonataFlowService.resourcesPath, + `new workflow changes ${definitionsPath}`, + ); + } + + return item; + } + + private async saveFile(path: string, data: any): Promise<void> { + this.logger.info(`Saving file ${path}`); + const fileExtension = extname(path); + const isWorkflow = /\.sw\.(json|yaml|yml)$/.test(path); + let contentToSave; + if (isWorkflow) { + contentToSave = toWorkflowString( + data, + fileExtension === '.json' ? 'json' : 'yaml', + ); + } else if (fileExtension === '.json') { + contentToSave = JSON.stringify(data, null, 2); + } else { + contentToSave = data; + } + await fs.writeFile(path, contentToSave, 'utf8'); + } + + async saveWorkflowDefinitionFromUrl(url: string): Promise<WorkflowItem> { + const workflow = await this.fetchWorkflowDefinitionFromUrl(url); + await this.saveWorkflowDefinition(workflow); + return workflow; + } + + async fetchWorkflowDefinitionFromUrl(url: string): Promise<WorkflowItem> { + const response = await fetch(url); + const content = await response.text(); + const definition = fromWorkflowSource(content); + const urlParts = url.split('/'); + const fileName = urlParts[urlParts.length - 1]; + return { + uri: fileName, + definition, + }; + } + + async saveOpenApi(): Promise<void> { + const path = this.resolveResourcePath(ACTIONS_OPEN_API_FILE_PATH); + const openApi = await this.openApiService.generateOpenApi(); + if (!openApi) { + return; + } + await this.saveFile(path, openApi); + + if (this.autoPush) { + await this.githubService.push( + this.sonataFlowService.resourcesPath, + `new openapi changes ${path}`, + ); + } + } + + async saveDataInputSchema( + workflowItem: WorkflowItem, + ): Promise<string | undefined> { + if (!workflowItem.definition) { + return undefined; + } + const openApi = await this.openApiService.generateOpenApi(); + const dataInputSchema = await this.dataInputSchemaService.generate({ + definition: workflowItem.definition, + openApi, + }); + + if (!dataInputSchema) { + return undefined; + } + + const workflowDataInputSchemaPath = join( + SCHEMAS_FOLDER, + dataInputSchema.compositionSchema.fileName, + ); + + dataInputSchema.compositionSchema.jsonSchema = { + $id: `classpath:/${workflowDataInputSchemaPath}`, + ...dataInputSchema.compositionSchema.jsonSchema, + }; + + dataInputSchema.actionSchemas.forEach(actionSchema => { + actionSchema.jsonSchema = { + $id: `classpath:/${SCHEMAS_FOLDER}/${actionSchema.fileName}`, + ...actionSchema.jsonSchema, + }; + }); + + const schemaFiles = [ + dataInputSchema.compositionSchema, + ...dataInputSchema.actionSchemas, + ]; + + const saveSchemaPromises = schemaFiles.map(schemaFile => { + const path = this.resolveResourcePath( + join(SCHEMAS_FOLDER, schemaFile.fileName), + ); + return this.saveFile(path, schemaFile.jsonSchema); + }); + + await Promise.all(saveSchemaPromises); + + return workflowDataInputSchemaPath; + } + + async deleteWorkflowDefinitionById(uri: string): Promise<void> { + const definitionsPath = this.resolveResourcePath(uri); + await fs.rm(definitionsPath, { force: true }); + } + + async listStoredSpecs(): Promise<WorkflowSpecFile[]> {
+ const specs: WorkflowSpecFile[] = []; + // We can list all spec files from FS but let's keep it simple for now + for (const relativePath of SPEC_FILES) { + const path = this.resolveResourcePath(relativePath); + if (!(await fs.pathExists(path))) { + continue; + } + const buffer = await fs.readFile(path); + const content = JSON.parse(buffer.toString('utf8')); + specs.push({ path, content }); + } + return specs; + } + + private resolveResourcePath(relativePath: string): string { + return resolve( + join( + this.sonataFlowService.resourcesPath, + DEFAULT_WORKFLOWS_PATH, + relativePath, + ), + ); + } + async reloadWorkflows() { + if (!this.repoURL) { + this.logger.info('No Git repository configured. Skipping reload.'); + return; + } + + this.logger.info('Reloading workflows from Git'); + const localPath = this.sonataFlowService.resourcesPath; + if (await fs.pathExists(localPath)) { + this.logger.info(`Path ${localPath} already exists. Skipping clone.`); + return; + } + + await fs.remove(localPath); + await this.githubService.clone(this.repoURL, localPath); + } +} diff --git a/plugins/orchestrator-backend/src/service/openapi-template.json b/plugins/orchestrator-backend/src/service/openapi-template.json new file mode 100644 index 0000000000..378edad507 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/openapi-template.json @@ -0,0 +1,79 @@ +{ + "openapi": "3.0.1", + "info": { + "title": "Workflow Actions for BS API", + "description": "Workflow Actions BS API", + "version": "0.0.1" + }, + "servers": [ + { + "url": "http://host.docker.internal:7007/api/orchestrator" + } + ], + "paths": { + "/actions/catalog:fetch": { + "post": { + "operationId": "catalog:fetch", + "parameters": [], + "requestBody": { + "description": "Input parameters for the action in BS", + "required": true, + "content": { + "application/json;charset=utf-8": { + "schema": { + "$ref": "#/components/schemas/catalogFetch" + } + } + } + }, + "responses": { + "default": { + "description": "catalog:fetch action response", + "content": { + "application/json;charset=utf-8": { + "schema": { + "type": "object" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "catalogFetch": { + "title": "catalog:fetch", + "type": "object", + "properties": { + "entityRef": { + "type": "string", + "description": "Entity reference of the entity to get" + }, + "entityRefs": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Entity references of the entities to get" + }, + "optional": { + "type": "boolean", + "description": "Allow the entity or entities to optionally exist. 
Default: false" + }, + "defaultKind": { + "type": "string", + "description": "The default kind" + }, + "defaultNamespace": { + "type": "string", + "description": "The default namespace" + } + }, + "required": ["entityRef"], + "additionalProperties": false + } + } + } +} diff --git a/plugins/orchestrator-backend/src/service/router.ts b/plugins/orchestrator-backend/src/service/router.ts new file mode 100644 index 0000000000..96faa7fa58 --- /dev/null +++ b/plugins/orchestrator-backend/src/service/router.ts @@ -0,0 +1,430 @@ +import { errorHandler } from '@backstage/backend-common'; +import { DiscoveryApi } from '@backstage/core-plugin-api'; +import { ScmIntegrations } from '@backstage/integration'; +import { JsonObject, JsonValue } from '@backstage/types'; + +import express from 'express'; +import Router from 'express-promise-router'; +import { JSONSchema7 } from 'json-schema'; + +import { + fromWorkflowSource, + ORCHESTRATOR_SERVICE_READY_TOPIC, + WorkflowDataInputSchemaResponse, + WorkflowDefinition, + WorkflowInfo, + WorkflowItem, + WorkflowListResult, + WorkflowOverviewListResult, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { RouterArgs } from '../routerWrapper'; +import { ApiResponseBuilder } from '../types/apiResponse'; +import { CloudEventService } from './CloudEventService'; +import { DataIndexService } from './DataIndexService'; +import { DataInputSchemaService } from './DataInputSchemaService'; +import { JiraEvent, JiraService } from './JiraService'; +import { OpenApiService } from './OpenApiService'; +import { ScaffolderService } from './ScaffolderService'; +import { SonataFlowService } from './SonataFlowService'; +import { WorkflowService } from './WorkflowService'; + +export async function createBackendRouter( + args: RouterArgs & { + sonataFlowService: SonataFlowService; + dataIndexService: DataIndexService; + }, +): Promise { + const { eventBroker, config, logger, discovery, catalogApi, urlReader } = + args; + + const router = Router(); + router.use(express.json()); + router.use('/workflows', express.text()); + + router.get('/health', (_, response) => { + logger.info('PONG!'); + response.json({ status: 'ok' }); + }); + + const githubIntegration = ScmIntegrations.fromConfig(config) + .github.list() + .pop(); + + const githubToken = githubIntegration?.config.token; + + if (!githubToken) { + logger.warn( + 'No GitHub token found. 
Some features may not work as expected.', + ); + } + + const cloudEventService = new CloudEventService(logger); + const jiraService = new JiraService(logger, cloudEventService); + const openApiService = new OpenApiService(logger, discovery); + const dataInputSchemaService = new DataInputSchemaService( + logger, + githubToken, + ); + + const workflowService = new WorkflowService( + openApiService, + dataInputSchemaService, + args.sonataFlowService, + config, + logger, + ); + + const scaffolderService: ScaffolderService = new ScaffolderService( + logger, + config, + catalogApi, + urlReader, + ); + + await workflowService.reloadWorkflows(); + + setupInternalRoutes( + router, + args.sonataFlowService, + workflowService, + openApiService, + jiraService, + args.dataIndexService, + dataInputSchemaService, + ); + setupExternalRoutes(router, discovery, scaffolderService); + + await eventBroker.publish({ + topic: ORCHESTRATOR_SERVICE_READY_TOPIC, + eventPayload: {}, + }); + + router.use(errorHandler()); + return router; +} + +// ====================================================== +// Internal Backstage API calls to delegate to SonataFlow +// ====================================================== +function setupInternalRoutes( + router: express.Router, + sonataFlowService: SonataFlowService, + workflowService: WorkflowService, + openApiService: OpenApiService, + jiraService: JiraService, + dataIndexService: DataIndexService, + dataInputSchemaService: DataInputSchemaService, +) { + router.get('/workflows/definitions', async (_, response) => { + const swfs = await dataIndexService.getWorkflowDefinitions(); + response.json(ApiResponseBuilder.SUCCESS_RESPONSE(swfs)); + }); + + router.get('/workflows/overview', async (_, res) => { + const overviews = await sonataFlowService.fetchWorkflowOverviews(); + + if (!overviews) { + res.status(500).send("Couldn't fetch workflow overviews"); + return; + } + + const result: WorkflowOverviewListResult = { + items: overviews, + limit: 0, + offset: 0, + totalCount: overviews?.length ?? 0, + }; + res.status(200).json(result); + }); + + router.get('/workflows', async (_, res) => { + const definitions: WorkflowInfo[] = + await dataIndexService.getWorkflowDefinitions(); + const items: WorkflowItem[] = await Promise.all( + definitions.map(async info => { + const uri = await sonataFlowService.fetchWorkflowUri(info.id); + if (!uri) { + throw new Error(`Uri is required for workflow ${info.id}`); + } + const item: WorkflowItem = { + definition: info as WorkflowDefinition, + serviceUrl: info.serviceUrl, + uri, + }; + return item; + }), + ); + + if (!items) { + res.status(500).send("Couldn't fetch workflows"); + return; + } + + const result: WorkflowListResult = { + items: items, + limit: 0, + offset: 0, + totalCount: items?.length ?? 
0, + }; + res.status(200).json(result); + }); + + router.get('/workflows/:workflowId', async (req, res) => { + const { + params: { workflowId }, + } = req; + + const definition = + await sonataFlowService.fetchWorkflowDefinition(workflowId); + + if (!definition) { + res + .status(500) + .send(`Couldn't fetch workflow definition for ${workflowId}`); + return; + } + + const uri = await sonataFlowService.fetchWorkflowUri(workflowId); + if (!uri) { + res.status(500).send(`Couldn't fetch workflow uri for ${workflowId}`); + return; + } + + res.status(200).json({ + uri, + definition, + }); + }); + + router.delete('/workflows/:workflowId/abort', async (req, res) => { + const { + params: { workflowId }, + } = req; + + const result = await dataIndexService.abortWorkflowInstance(workflowId); + + if (result.error) { + res.status(500).json(result.error); + return; + } + + res.status(200).json(result.data); + }); + + router.post('/workflows/:workflowId/execute', async (req, res) => { + const { + params: { workflowId }, + } = req; + + const definition = await dataIndexService.getWorkflowDefinition(workflowId); + const serviceUrl = definition.serviceUrl; + if (!serviceUrl) { + throw new Error(`ServiceURL is not defined for workflow ${workflowId}`); + } + const executionResponse = await sonataFlowService.executeWorkflow({ + workflowId, + inputData: req.body, + endpoint: serviceUrl, + }); + + if (!executionResponse) { + res.status(500).send(`Couldn't execute workflow ${workflowId}`); + return; + } + + res.status(200).json(executionResponse); + }); + + router.get('/workflows/:workflowId/overview', async (req, res) => { + const { + params: { workflowId }, + } = req; + const overviewObj = + await sonataFlowService.fetchWorkflowOverview(workflowId); + + if (!overviewObj) { + res + .status(500) + .send(`Couldn't fetch workflow overview for ${workflowId}`); + return; + } + res.status(200).json(overviewObj); + }); + + router.get('/instances', async (_, res) => { + const instances = await dataIndexService.fetchProcessInstances(); + + if (!instances) { + res.status(500).send("Couldn't fetch process instances"); + return; + } + + res.status(200).json(instances); + }); + + router.get('/instances/:instanceId', async (req, res) => { + const { + params: { instanceId }, + } = req; + const instance = await dataIndexService.fetchProcessInstance(instanceId); + + if (!instance) { + res.status(500).send(`Couldn't fetch process instance ${instanceId}`); + return; + } + + res.status(200).json(instance); + }); + + router.get('/instances/:instanceId/jobs', async (req, res) => { + const { + params: { instanceId }, + } = req; + + const jobs = await dataIndexService.fetchProcessInstanceJobs(instanceId); + + if (!jobs) { + res.status(500).send(`Couldn't fetch jobs for instance ${instanceId}`); + return; + } + + res.status(200).json(jobs); + }); + + router.get('/workflows/:workflowId/inputSchema', async (req, res) => { + const { + params: { workflowId }, + } = req; + + const workflowDefinition = + await dataIndexService.getWorkflowDefinition(workflowId); + const serviceUrl = workflowDefinition.serviceUrl; + if (!serviceUrl) { + throw new Error(`ServiceUrl is not defined for workflow ${workflowId}`); + } + + // workflow source + const definition = + await sonataFlowService.fetchWorkflowDefinition(workflowId); + + if (!definition) { + res.status(500).send(`Couldn't fetch workflow definition ${workflowId}`); + return; + } + + const uri = await sonataFlowService.fetchWorkflowUri(workflowId); + + if (!uri) { + res.status(500).send(`Couldn't 
fetch workflow uri ${workflowId}`); + return; + } + + const workflowItem: WorkflowItem = { uri, definition }; + + let schemas: JSONSchema7[] = []; + + if (definition.dataInputSchema) { + const workflowInfo = await sonataFlowService.fetchWorkflowInfo( + workflowId, + serviceUrl, + ); + + if (!workflowInfo) { + res.status(500).send(`Couldn't fetch workflow info ${workflowId}`); + return; + } + + if (!workflowInfo.inputSchema) { + res + .status(500) + .send(`Couldn't fetch workflow input schema ${workflowId}`); + return; + } + + schemas = dataInputSchemaService.parseComposition( + workflowInfo.inputSchema, + ); + } + + const response: WorkflowDataInputSchemaResponse = { + workflowItem, + schemas, + }; + + res.status(200).json(response); + }); + + router.delete('/workflows/:workflowId', async (req, res) => { + const workflowId = req.params.workflowId; + const uri = await sonataFlowService.fetchWorkflowUri(workflowId); + + if (!uri) { + res.status(500).send(`Couldn't fetch workflow uri ${workflowId}`); + return; + } + + await workflowService.deleteWorkflowDefinitionById(uri); + res.status(200).send(); + }); + + router.post('/workflows', async (req, res) => { + const uri = req.query.uri as string; + const workflowItem = uri?.startsWith('http') + ? await workflowService.saveWorkflowDefinitionFromUrl(uri) + : await workflowService.saveWorkflowDefinition({ + uri, + definition: fromWorkflowSource(req.body), + }); + res.status(201).json(workflowItem).send(); + }); + + router.get('/actions/schema', async (_, res) => { + const openApi = await openApiService.generateOpenApi(); + res.json(openApi).status(200).send(); + }); + + router.put('/actions/schema', async (_, res) => { + const openApi = await workflowService.saveOpenApi(); + res.json(openApi).status(200).send(); + }); + + router.post('/webhook/jira', async (req, res) => { + const event = req.body as JiraEvent; + await jiraService.handleEvent(event); + res.status(200).send(); + }); + + router.get('/specs', async (_, res) => { + const specs = await workflowService.listStoredSpecs(); + res.status(200).json(specs); + }); +} + +// ====================================================== +// External SonataFlow API calls to delegate to Backstage +// ====================================================== +function setupExternalRoutes( + router: express.Router, + discovery: DiscoveryApi, + scaffolderService: ScaffolderService, +) { + router.get('/actions', async (_, res) => { + const scaffolderUrl = await discovery.getBaseUrl('scaffolder'); + const response = await fetch(`${scaffolderUrl}/v2/actions`); + const json = await response.json(); + res.status(response.status).json(json); + }); + + router.post('/actions/:actionId', async (req, res) => { + const { actionId } = req.params; + const instanceId: string | undefined = req.header('kogitoprocinstanceid'); + const body: JsonObject = (await req.body) as JsonObject; + const result: JsonValue = await scaffolderService.executeAction({ + actionId, + instanceId, + input: body, + }); + res.status(200).json(result); + }); +} diff --git a/plugins/orchestrator-backend/src/types/apiResponse.ts b/plugins/orchestrator-backend/src/types/apiResponse.ts new file mode 100644 index 0000000000..3e39c9b66c --- /dev/null +++ b/plugins/orchestrator-backend/src/types/apiResponse.ts @@ -0,0 +1,31 @@ +export interface ApiResponse { + message?: string; + result?: any; + backEndErrCd?: string; +} + +export class ApiResponseBuilder { + static SUCCESS_RESPONSE(result: any, message = 'success'): ApiResponse { + return { + result: result, 
+ message: message, + }; + } + + static VALIDATION_ERR_RESPONSE( + backEndErrCd = 'backend validation error code', + message = 'backend validation error', + ): ApiResponse { + return { + message: message, + backEndErrCd: backEndErrCd, + }; + } + + static HTTP_ERR_RESPONSE(message = 'Internal Server Error'): ApiResponse { + return { + result: null, + message: message, + }; + } +} diff --git a/plugins/orchestrator-backend/tsconfig.json b/plugins/orchestrator-backend/tsconfig.json new file mode 100644 index 0000000000..f75e577143 --- /dev/null +++ b/plugins/orchestrator-backend/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@backstage/cli/config/tsconfig.json", + "include": ["src", "dev"], + "exclude": ["node_modules"], + "compilerOptions": { + "outDir": "../../dist-types/plugins/orchestrator-backend", + "rootDir": "." + } +} diff --git a/plugins/orchestrator-backend/turbo.json b/plugins/orchestrator-backend/turbo.json new file mode 100644 index 0000000000..8f2ea67b53 --- /dev/null +++ b/plugins/orchestrator-backend/turbo.json @@ -0,0 +1,9 @@ +{ + "extends": ["//"], + "pipeline": { + "tsc": { + "outputs": ["../../dist-types/plugins/orchestrator-backend/**"], + "dependsOn": ["^tsc"] + } + } +} diff --git a/plugins/orchestrator-common/.eslintrc.js b/plugins/orchestrator-common/.eslintrc.js new file mode 100644 index 0000000000..e2a53a6ad2 --- /dev/null +++ b/plugins/orchestrator-common/.eslintrc.js @@ -0,0 +1 @@ +module.exports = require('@backstage/cli/config/eslint-factory')(__dirname); diff --git a/plugins/orchestrator-common/CHANGELOG.md b/plugins/orchestrator-common/CHANGELOG.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/orchestrator-common/OWNERS b/plugins/orchestrator-common/OWNERS new file mode 100644 index 0000000000..e09f7ec35c --- /dev/null +++ b/plugins/orchestrator-common/OWNERS @@ -0,0 +1,6 @@ +approvers: + - caponetto + - jkilzi +reviewers: + - caponetto + - jkilzi diff --git a/plugins/orchestrator-common/README.md b/plugins/orchestrator-common/README.md new file mode 100644 index 0000000000..1a98fe677a --- /dev/null +++ b/plugins/orchestrator-common/README.md @@ -0,0 +1,5 @@ +# Orchestrator Common Plugin for Backstage + +Welcome to the common package for the Orchestrator plugin! + +For more information about the Orchestrator plugin, see the [Orchestrator Plugin documentation](https://github.com/janus-idp/backstage-plugins/tree/main/plugins/orchestrator) on GitHub. diff --git a/plugins/orchestrator-common/config.d.ts b/plugins/orchestrator-common/config.d.ts new file mode 100644 index 0000000000..1985943922 --- /dev/null +++ b/plugins/orchestrator-common/config.d.ts @@ -0,0 +1,116 @@ +export interface Config { + /** + * Configuration for the Orchestrator plugin. + */ + orchestrator?: { + sonataFlowService: { + /** + * Base URL of the Sonata Flow service. + * Default: http://localhost + */ + baseUrl?: string; + /** + * Port of the Sonata Flow service. + * Default: no port + */ + port?: string; + /** + * Whether to start the Sonata Flow service automatically. + * If set to `false`, the plugin assumes that the SonataFlow service is already running on `baseUrl`:`port` (or just `baseUrl` if `port` is not set). + * Default: false + */ + autoStart?: boolean; + /** + * Workflows definitions source configurations + */ + workflowsSource?: + | { + /** + * Remote git repository where workflows definitions are stored + */ + gitRepositoryUrl: string; + /** + * Path to map workflow resources to SonataFlow service. 
+ * Example: /home/orchestrator/workflows + */ + localPath: string; + /** + * Indicates to push changes to the gitRepository upon changes on workflows definition and resources + */ + autoPush: boolean; + } + | { + localPath: string; + }; + + /** + * Container image name of the Sonata Flow service. + * Default: quay.io/kiegroup/kogito-swf-devmode-nightly:main-2023-08-30 + */ + container?: string; + /** + * Persistance configuration of the Sonata Flow service. + */ + persistance?: { + /** + * Path in the container image to store persistance data. + * Default: /home/kogito/persistence + */ + path?: string; + }; + }; + dataIndexService: { + /** + * URL of the Data Index service. + * Example: http://localhost:8099 + */ + url: string; + }; + /** + * Configuration for the integration with the Catalog plugin. + */ + catalog?: { + /** + * Whether to enable the integration with the Catalog plugin. + * Default: false + */ + isEnabled?: boolean; + /** + * Owner of workflows to present on the component catalog. + * Default: infrastructure + */ + owner?: string; + /** + * Environment of workflows to present on the component catalog. + * Default: development + */ + environment?: string; + }; + /** + * Configuration for the workflow editor. + */ + editor?: { + /** + * Path to the envelope context (either a remote url or a local path under app/public folder). + * Default: https://start.kubesmarts.org + * @visibility frontend + */ + path?: string; + }; + /** + * Configuration for the integration with Jira API. + * Note: This is a temporary solution. We should probably use the JIRA integration config instead. + */ + jira?: { + /** + * Base URL of the Jira API. + */ + host?: string; + /** + * Token to authenticate with the Jira API. + * @visibility secret + */ + bearerToken?: string; + }; + }; +} diff --git a/plugins/orchestrator-common/package.json b/plugins/orchestrator-common/package.json new file mode 100644 index 0000000000..097001e50a --- /dev/null +++ b/plugins/orchestrator-common/package.json @@ -0,0 +1,48 @@ +{ + "name": "@janus-idp/backstage-plugin-orchestrator-common", + "version": "0.0.1", + "license": "Apache-2.0", + "main": "src/index.ts", + "types": "src/index.ts", + "publishConfig": { + "access": "public", + "main": "dist/index.cjs.js", + "module": "dist/index.esm.js", + "types": "dist/index.d.ts" + }, + "backstage": { + "role": "common-library" + }, + "homepage": "https://janus-idp.io/", + "repository": "github:janus-idp/backstage-plugins", + "bugs": "https://github.com/janus-idp/backstage-plugins/issues", + "keywords": [ + "backstage", + "plugin", + "orchestrator", + "workflows" + ], + "files": [ + "config.d.ts", + "dist" + ], + "configSchema": "config.d.ts", + "scripts": { + "build": "backstage-cli package build", + "tsc": "tsc", + "lint": "backstage-cli package lint", + "test": "backstage-cli package test --passWithNoTests --coverage", + "clean": "backstage-cli package clean", + "prepack": "backstage-cli package prepack", + "postpack": "backstage-cli package postpack" + }, + "dependencies": { + "@severlessworkflow/sdk-typescript": "^3.0.3", + "js-yaml": "^4.1.0", + "json-schema": "^0.4.0", + "openapi-types": "^12.1.3" + }, + "devDependencies": { + "@backstage/cli": "0.23.0" + } +} diff --git a/plugins/orchestrator-common/src/constants.ts b/plugins/orchestrator-common/src/constants.ts new file mode 100644 index 0000000000..ea51ef95f8 --- /dev/null +++ b/plugins/orchestrator-common/src/constants.ts @@ -0,0 +1,77 @@ +import { WorkflowDefinition, WorkflowSample } from './types'; + +export 
const ORCHESTRATOR_SERVICE_READY_TOPIC = 'orchestrator-service-ready'; + +export const EMPTY_DEFINITION: WorkflowDefinition = { + id: 'workflow_unique_identifier', + version: '0.1', + specVersion: '0.8', + name: 'Workflow name', + description: 'Workflow description', + start: 'StartState', + functions: [ + { + name: 'uniqueFunctionName', + operation: 'specs/actions-openapi.json#catalog:fetch', + }, + ], + states: [ + { + name: 'StartState', + type: 'operation', + actions: [ + { + name: 'uniqueActionName', + functionRef: { + refName: 'uniqueFunctionName', + arguments: { + entityRef: '.entityRef', + }, + }, + }, + ], + end: true, + }, + ], +}; + +export const SCHEMAS_FOLDER = 'schemas'; +export const SPECS_FOLDER = 'specs'; + +export const JIRA_OPEN_API_FILE = 'jira-openapi.json'; +export const JIRA_OPEN_API_FILE_PATH = `${SPECS_FOLDER}/${JIRA_OPEN_API_FILE}`; + +export const ACTIONS_OPEN_API_FILE = 'actions-openapi.json'; +export const ACTIONS_OPEN_API_FILE_PATH = `${SPECS_FOLDER}/${ACTIONS_OPEN_API_FILE}`; + +export const SPEC_FILES = [ACTIONS_OPEN_API_FILE_PATH, JIRA_OPEN_API_FILE_PATH]; + +export const WORKFLOW_TITLE = 'Workflow'; +export const WORKFLOW_TITLE_PLURAL = 'Workflows'; +export const WORKFLOW_TYPE = 'workflow'; + +export const WORKFLOW_JSON_SAMPLE: WorkflowSample = { + id: 'jsongreet', + url: 'https://raw.githubusercontent.com/kiegroup/kogito-examples/stable/serverless-workflow-examples/serverless-workflow-greeting-quarkus/src/main/resources/jsongreet.sw.json', +}; + +export const WORKFLOW_YAML_SAMPLE: WorkflowSample = { + id: 'yamlgreet', + url: 'https://raw.githubusercontent.com/kiegroup/kogito-examples/stable/serverless-workflow-examples/serverless-workflow-greeting-quarkus/src/main/resources/yamlgreet.sw.yml', +}; + +// Default values for the orchestrator plugin configuration +export const DEFAULT_SONATAFLOW_CONTAINER_IMAGE = + 'quay.io/kiegroup/kogito-swf-devmode-nightly:main-2024-01-08'; +export const DEFAULT_SONATAFLOW_PERSISTANCE_PATH = '/home/kogito/persistence'; +export const DEFAULT_CATALOG_OWNER = 'orchestrator'; +export const DEFAULT_CATALOG_ENVIRONMENT = 'development'; +export const DEFAULT_EDITOR_PATH = 'https://start.kubesmarts.org'; +export const DEFAULT_SONATAFLOW_BASE_URL = 'http://localhost'; + +export const DEFAULT_WORKFLOWS_PATH = 'workflows'; + +export const ASSESSMENT_WORKFLOW_TYPE = 'workflow-type/assessment'; +export const INFRASTRUCTURE_WORKFLOW_TYPE = 'workflow-type/infrastructure'; + +export const FEATURE_FLAG_DEVELOPER_MODE = 'developer-mode'; diff --git a/plugins/orchestrator-common/src/index.ts b/plugins/orchestrator-common/src/index.ts new file mode 100644 index 0000000000..d948f47d9d --- /dev/null +++ b/plugins/orchestrator-common/src/index.ts @@ -0,0 +1,4 @@ +export * from './types'; +export * from './constants'; +export * from './models'; +export * from './workflow'; diff --git a/plugins/orchestrator-common/src/models.ts b/plugins/orchestrator-common/src/models.ts new file mode 100644 index 0000000000..f92eacc5bc --- /dev/null +++ b/plugins/orchestrator-common/src/models.ts @@ -0,0 +1,110 @@ +import { WorkflowCategory, WorkflowDefinition } from './types'; + +export enum ProcessInstanceState { + Active = 'ACTIVE', + Completed = 'COMPLETED', + Aborted = 'ABORTED', + Suspended = 'SUSPENDED', + Error = 'ERROR', +} + +export type ProcessInstanceStateValues = Uppercase< + keyof typeof ProcessInstanceState +>; + +export enum MilestoneStatus { + Available = 'AVAILABLE', + Active = 'ACTIVE', + Completed = 'COMPLETED', +} + +export interface 
NodeInstance { + __typename?: 'NodeInstance'; + id: string; + name: string; + type: string; + enter: Date; + exit?: Date; + definitionId: string; + nodeId: string; +} + +export interface TriggerableNode { + id: number; + name: string; + type: string; + uniqueId: string; + nodeDefinitionId: string; +} + +export interface Milestone { + __typename?: 'Milestone'; + id: string; + name: string; + status: MilestoneStatus; +} + +export interface ProcessInstanceError { + __typename?: 'ProcessInstanceError'; + nodeDefinitionId: string; + message?: string; +} +export interface ProcessInstance { + id: string; + processId: string; + processName?: string; + parentProcessInstanceId?: string; + rootProcessInstanceId?: string; + rootProcessId?: string; + roles?: string[]; + state: ProcessInstanceStateValues; + endpoint: string; + serviceUrl?: string; + nodes: NodeInstance[]; + milestones?: Milestone[]; + variables?: Record | string; + start: Date; + end?: Date; + parentProcessInstance?: ProcessInstance; + childProcessInstances?: ProcessInstance[]; + error?: ProcessInstanceError; + addons?: string[]; + lastUpdate: Date; + businessKey?: string; + isSelected?: boolean; + errorMessage?: string; + isOpen?: boolean; + diagram?: string; + nodeDefinitions?: TriggerableNode[]; + source?: string; + category?: WorkflowCategory; + description?: WorkflowDefinition['description']; +} + +export enum JobStatus { + Error = 'ERROR', + Executed = 'EXECUTED', + Scheduled = 'SCHEDULED', + Retry = 'RETRY', + Canceled = 'CANCELED', +} + +export interface Job { + id: string; + processId: string; + processInstanceId: string; + rootProcessInstanceId?: string; + rootProcessId?: string; + status: JobStatus; + expirationTime: Date; + priority: number; + callbackEndpoint: string; + repeatInterval: number; + repeatLimit: number; + scheduledId: string; + retries: number; + lastUpdate: Date; + executionCounter?: number; + endpoint?: string; + nodeInstanceId?: string; +} diff --git a/plugins/orchestrator-common/src/types.ts b/plugins/orchestrator-common/src/types.ts new file mode 100644 index 0000000000..b63311212a --- /dev/null +++ b/plugins/orchestrator-common/src/types.ts @@ -0,0 +1,103 @@ +import { Specification } from '@severlessworkflow/sdk-typescript'; +import { JSONSchema7 } from 'json-schema'; +import { OpenAPIV3 } from 'openapi-types'; + +import { ProcessInstanceStateValues } from './models'; + +type Id = { [P in keyof T]: T[P] }; + +type OmitDistributive = T extends any + ? T extends object + ? 
Id> + : T + : never; + +export type OmitRecursively = Omit< + { [P in keyof T]: OmitDistributive }, + K +>; + +export type WorkflowDefinition = OmitRecursively< + Specification.Workflow, + 'normalize' +>; + +export interface WorkflowItem { + serviceUrl?: string; + uri: string; + definition: WorkflowDefinition; +} + +export type WorkflowListResult = { + items: WorkflowItem[]; + totalCount: number; + offset: number; + limit: number; +}; + +export type WorkflowOverviewListResult = { + items: WorkflowOverview[]; + totalCount: number; + offset: number; + limit: number; +}; + +export type WorkflowFormat = 'yaml' | 'json'; + +export interface WorkflowSample { + id: string; + url: string; +} + +export interface WorkflowSpecFile { + path: string; + content: OpenAPIV3.Document; +} +export interface WorkflowDataInputSchemaResponse { + workflowItem: WorkflowItem; + schemas: JSONSchema7[]; +} + +export interface WorkflowExecutionResponse { + id: string; +} + +export enum WorkflowCategory { + ASSESSMENT = 'assessment', + INFRASTRUCTURE = 'infrastructure', +} + +export interface WorkflowOverview { + workflowId: string; + name?: string; + uri?: string; + lastTriggeredMs?: number; + lastRunStatus?: ProcessInstanceStateValues; + category?: string; + avgDurationMs?: number; + description?: string; +} + +export interface WorkflowInfo { + id: string; + type?: string; + name?: string; + version?: string; + annotations?: string[]; + description?: string; + inputSchema?: JSONSchema7; + endpoint?: string; + serviceUrl?: string; + roles?: string[]; + source?: string; + metadata?: Map; + nodes?: Node[]; +} + +export interface Node { + id: string; + type?: string; + name?: string; + uniqueId?: string; + nodeDefinitionId?: string; +} diff --git a/plugins/orchestrator-common/src/workflow.ts b/plugins/orchestrator-common/src/workflow.ts new file mode 100644 index 0000000000..2ed638689b --- /dev/null +++ b/plugins/orchestrator-common/src/workflow.ts @@ -0,0 +1,79 @@ +import { Specification } from '@severlessworkflow/sdk-typescript'; +import { dump } from 'js-yaml'; + +import { ASSESSMENT_WORKFLOW_TYPE } from './constants'; +import { WorkflowCategory, WorkflowDefinition, WorkflowFormat } from './types'; + +export function fromWorkflowSource(content: string): WorkflowDefinition { + const parsed = Specification.Workflow.fromSource(content); + const workflow = parsed.sourceModel ?? 
parsed; + return removeProperty(workflow, 'normalize'); +} + +export function toWorkflowString( + definition: WorkflowDefinition, + format: WorkflowFormat, +): string { + switch (format) { + case 'json': + return toWorkflowJson(definition); + case 'yaml': + return toWorkflowYaml(definition); + default: + throw new Error(`Unsupported format ${format}`); + } +} + +export function toWorkflowJson(definition: WorkflowDefinition): string { + return JSON.stringify(definition, null, 2); +} + +export function toWorkflowYaml(definition: WorkflowDefinition): string { + return dump(definition); +} + +export function extractWorkflowFormatFromUri(uri: string): WorkflowFormat { + const match = RegExp(/\.sw\.(json|yaml|yml)$/).exec(uri); + if (match) { + if (match[1] === 'yml' || match[1] === 'yaml') { + return 'yaml'; + } + if (match[1] === 'json') { + return 'json'; + } + } + throw new Error(`Unsupported workflow format for uri ${uri}`); +} + +export function getWorkflowCategory( + definition: WorkflowDefinition | undefined, +): WorkflowCategory { + if (definition === undefined) { + return WorkflowCategory.INFRASTRUCTURE; + } + return definition?.annotations?.find( + annotation => annotation === ASSESSMENT_WORKFLOW_TYPE, + ) + ? WorkflowCategory.ASSESSMENT + : WorkflowCategory.INFRASTRUCTURE; +} + +function removeProperty(obj: T, propToDelete: string): T { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + if (Array.isArray(obj)) { + return obj.map(item => removeProperty(item, propToDelete)) as T; + } + + const newObj: any = {}; + + for (const key in obj) { + if (key !== propToDelete) { + newObj[key] = removeProperty(obj[key], propToDelete); // Recurse into nested objects + } + } + + return newObj; +} diff --git a/plugins/orchestrator-common/tsconfig.json b/plugins/orchestrator-common/tsconfig.json new file mode 100644 index 0000000000..a86c4d53d2 --- /dev/null +++ b/plugins/orchestrator-common/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@backstage/cli/config/tsconfig.json", + "include": ["src"], + "exclude": ["node_modules"], + "compilerOptions": { + "outDir": "../../dist-types/plugins/orchestrator-common", + "rootDir": "." 
+ } +} diff --git a/plugins/orchestrator-common/turbo.json b/plugins/orchestrator-common/turbo.json new file mode 100644 index 0000000000..79100511bd --- /dev/null +++ b/plugins/orchestrator-common/turbo.json @@ -0,0 +1,9 @@ +{ + "extends": ["//"], + "pipeline": { + "tsc": { + "outputs": ["../../dist-types/plugins/orchestrator-common/**"], + "dependsOn": ["^tsc"] + } + } +} diff --git a/plugins/orchestrator/.eslintrc.js b/plugins/orchestrator/.eslintrc.js new file mode 100644 index 0000000000..e2a53a6ad2 --- /dev/null +++ b/plugins/orchestrator/.eslintrc.js @@ -0,0 +1 @@ +module.exports = require('@backstage/cli/config/eslint-factory')(__dirname); diff --git a/plugins/orchestrator/CHANGELOG.md b/plugins/orchestrator/CHANGELOG.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/orchestrator/OWNERS b/plugins/orchestrator/OWNERS new file mode 100644 index 0000000000..e09f7ec35c --- /dev/null +++ b/plugins/orchestrator/OWNERS @@ -0,0 +1,6 @@ +approvers: + - caponetto + - jkilzi +reviewers: + - caponetto + - jkilzi diff --git a/plugins/orchestrator/README.md b/plugins/orchestrator/README.md new file mode 100644 index 0000000000..b09244a75f --- /dev/null +++ b/plugins/orchestrator/README.md @@ -0,0 +1,252 @@ +# Orchestrator Plugin for Backstage + +The Orchestrator for Backstage is a mechanism designed to facilitate the implementation and execution of developer self-service flows. It serves as a vital component that enhances and augments the existing scaffolder functionality of Backstage with a more flexible and powerful set of features, including long-running and asynchronous flows. + +The orchestrator works harmoniously with other Backstage components such as the Software Catalog, permissions, and plugins. By leveraging its capabilities, organizations can orchestrate and coordinate developer self-service flows effectively. + +## Context + +The Backstage Orchestrator plugin aims to provide a better option than the Scaffolder: a workflow-based approach that results in a more flexible and powerful tool, streamlining and automating processes and allowing developers to focus more on coding and innovation. + +The orchestrator relies on [SonataFlow](https://sonataflow.org/), a powerful tool for building cloud-native workflow applications. + +The main idea is to keep the same user experience, leveraging the UI components, input forms, and flow that the Scaffolder provides. This way the experience stays straightforward and transparent for users, no matter whether they use Templates or Workflows, and both can live together, compatible with the same integration points. + +The orchestrator controls the flow, orchestrating operations/tasks that may be executed in any external service, including Scaffolder Actions. This makes it possible to leverage any existing Action, so Templates and GPT can be easily migrated to workflows, opening the door to extending them to more complex use cases (see the sketches after the capabilities list below). + +## Capabilities + +**Advanced core capabilities** + +- Stateful/long-lived +- Branching and parallelism +- Error management and compensation +- Event-driven supporting [CloudEvents](https://cloudevents.io) +- Audit logging +- Sub-flows +- Choreography +- Timer/timeout control +- Built-in powerful expression evaluation with JQ +- Low Code/No code +- Cloud-native architecture Kubernetes/OpenShift with Operator support +- OpenAPI / REST built-in integration, etc.
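As a concrete illustration of how workflows reuse existing Backstage functionality: the backend in this PR serves the Scaffolder actions to SonataFlow as an OpenAPI document (see `openapi-template.json` and the `/actions` routes in `router.ts`), so a workflow function can call an action such as `catalog:fetch` like any other REST operation. The sketch below mirrors the `EMPTY_DEFINITION` constant shipped in `orchestrator-common`; the workflow id, state and function names, and the `.entityRef` input wiring are illustrative only and not part of this PR.

```ts
import { WorkflowDefinition } from '@janus-idp/backstage-plugin-orchestrator-common';

// Illustrative sketch (not part of this PR): a minimal workflow whose single
// operation state calls the Backstage `catalog:fetch` Scaffolder action
// through the generated actions OpenAPI document served under `specs/`.
export const catalogFetchWorkflowSketch: WorkflowDefinition = {
  id: 'catalog_fetch_example', // made-up id
  version: '0.1',
  specVersion: '0.8',
  name: 'Fetch a catalog entity',
  description: 'Calls the catalog:fetch action from a workflow state',
  start: 'FetchEntity',
  functions: [
    {
      name: 'catalogFetchFunction',
      // Operations resolve against the OpenAPI document generated by the backend
      operation: 'specs/actions-openapi.json#catalog:fetch',
    },
  ],
  states: [
    {
      name: 'FetchEntity',
      type: 'operation',
      actions: [
        {
          name: 'fetchEntityAction',
          functionRef: {
            refName: 'catalogFetchFunction',
            arguments: {
              // Taken from the workflow's input data at runtime
              entityRef: '.entityRef',
            },
          },
        },
      ],
      end: true,
    },
  ],
};
```

Saved as a `*.sw.json` file under the configured workflows source, such a definition can be picked up by the SonataFlow service and executed through the orchestrator endpoints.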
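On the consumer side, the router added in `plugins/orchestrator-backend/src/service/router.ts` exposes these capabilities as REST endpoints under `/api/orchestrator`. The snippet below is only a usage sketch: it assumes a local Backstage backend on port 7007 with authentication omitted, and it uses the `jsongreet` sample workflow id with made-up input values.

```ts
// Usage sketch (assumptions: Node 18+ global fetch, Backstage backend on
// localhost:7007, auth omitted). Endpoint paths come from router.ts.
const baseUrl = 'http://localhost:7007/api/orchestrator';

async function run() {
  // GET /workflows/overview -> WorkflowOverviewListResult
  const overviewRes = await fetch(`${baseUrl}/workflows/overview`);
  const overviews = await overviewRes.json();
  console.log(overviews.items);

  // POST /workflows/:workflowId/execute -> WorkflowExecutionResponse
  // The JSON body is forwarded to SonataFlow as the workflow's input data.
  const execRes = await fetch(`${baseUrl}/workflows/jsongreet/execute`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name: 'John', language: 'English' }), // made-up input
  });
  const execution = await execRes.json();
  console.log(`Started instance ${execution.id}`);
}

run().catch(err => {
  console.error(err);
  process.exit(1);
});
```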
+ +**Client-side tooling** + +- Orchestration visualization / graphical editor +- Integration with service catalog/actions +- GitHub integration +- Form generation +- Runtime monitoring of instances +- Dashboards +- Potential custom integrations (user interaction, notifications, etc.) + +## For administrators + +### Installation + +The Orchestrator plugin is composed of the following packages: + +- `@janus-idp/backstage-plugin-orchestrator-backend` package connects the Backstage server to the Orchestrator. For the setup process, see [Backend Setup](#setting-up-the-orchestrator-backend-package) +- `@janus-idp/backstage-plugin-orchestrator` package contains frontend components for the Orchestrator plugin. For the setup process, see [Frontend Setup](#setting-up-the-orchestrator-frontend-package) +- `@janus-idp/backstage-plugin-orchestrator-common` package contains shared code between the Orchestrator plugin packages. +- `@janus-idp/backstage-plugin-catalog-backend-module-orchestrator-entity-provider` package is a backend module for the catalog plugin that provides the Orchestrator entity provider. + +#### Prerequisites + +- Docker up and running (currently a limitation, see [Limitations](#limitations)) + +#### Setting up the configuration for the Orchestrator plugin + +The following configuration is required for the Orchestrator plugin to work properly: + +```yaml title="app-config.yaml" +orchestrator: + sonataFlowService: + baseUrl: http://localhost + port: 8899 + autoStart: true + workflowsSource: + gitRepositoryUrl: https://github.com/tiagodolphine/backstage-orchestrator-workflows + localPath: /tmp/orchestrator/repository + autoPush: true + dataIndexService: + url: ${DATA_INDEX_URL} +``` + +- When interacting with an existing SonataFlow backend service at `baseUrl` and `port`, `autoStart` needs to be unset or set to `false`, and the `workflowsSource` section can be omitted. +- Set the environment variable `DATA_INDEX_URL` to point to a running Data Index service accessible via its GraphQL interface, such as http:///graphql + +For more information about the configuration options, including other optional properties, see the [config.d.ts](../orchestrator-common/config.d.ts) file. + +- Although optional, you may also want to set up the `GITHUB_TOKEN` environment variable to allow the Orchestrator to access the GitHub API. + +#### Setting up the Orchestrator backend package + +1. Install the Orchestrator backend plugin using the following command: + + ```console + yarn workspace backend add @janus-idp/backstage-plugin-orchestrator-backend + ``` + +1. Create a new plugin instance in `packages/backend/src/plugins/orchestrator.ts` file: + + ```ts title="packages/backend/src/plugins/orchestrator.ts" + import { Router } from 'express'; + + import { createRouter } from '@janus-idp/backstage-plugin-orchestrator-backend'; + + import { PluginEnvironment } from '../types'; + + export default async function createPlugin( + env: PluginEnvironment, + ): Promise<Router> { + return await createRouter({ + eventBroker: env.eventBroker, + config: env.config, + logger: env.logger, + discovery: env.discovery, + catalogApi: env.catalogApi, + urlReader: env.reader, + }); + } + ``` + +1. 
Add the following code to `packages/backend/src/plugins/catalog.ts` file: + + ```ts title="packages/backend/src/plugins/catalog.ts" + /* highlight-add-next-line */ + import { OrchestratorEntityProvider } from '@janus-idp/backstage-plugin-orchestrator-backend'; + + export default async function createPlugin( + env: PluginEnvironment, + ): Promise { + const builder = await CatalogBuilder.create(env); + + /* ... other processors and/or providers ... */ + /* highlight-add-start */ + builder.addEntityProvider( + await OrchestratorEntityProvider.fromConfig({ + config: env.config, + logger: env.logger, + scheduler: env.scheduler, + discovery: env.discovery, + }), + ); + /* highlight-add-end */ + + const { processingEngine, router } = await builder.build(); + await processingEngine.start(); + return router; + } + ``` + +1. Import and plug the new instance into `packages/backend/src/index.ts` file: + + ```ts title="packages/backend/src/index.ts" + /* highlight-add-next-line */ + import orchestrator from './plugins/orchestrator'; + + async function main() { + // ... + const createEnv = makeCreateEnv(config); + // ... + /* highlight-add-next-line */ + const orchestratorEnv = useHotMemoize(module, () => + createEnv('orchestrator'), + ); + // ... + const apiRouter = Router(); + // ... + /* highlight-add-next-line */ + apiRouter.use('/orchestrator', await orchestrator(orchestratorEnv)); + // ... + } + ``` + +#### Setting up the Orchestrator frontend package + +1. Install the Orchestrator frontend plugin using the following command: + + ```console + yarn workspace app add @janus-idp/backstage-plugin-orchestrator + ``` + +1. Add a route to the `OrchestratorPage` and the customized template card component to Backstage App (`packages/app/src/App.tsx`): + + ```tsx title="packages/app/src/App.tsx" + /* highlight-add-next-line */ + import { + OrchestratorPage, + OrchestratorScaffolderTemplateCard, + } from '@janus-idp/backstage-plugin-orchestrator'; + + const routes = ( + + {/* ... */} + {/* highlight-add-start */} + + } + /> + {/* highlight-add-end */} + {/* ... */} + {/* highlight-add-next-line */} + } /> + + ); + ``` + +1. Add the Orchestrator to Backstage side bar (`packages/app/src/components/Root/Root.tsx`): + + ```tsx title="packages/app/src/components/Root/Root.tsx" + /* highlight-add-next-line */ + import { OrchestratorIcon } from '@janus-idp/backstage-plugin-orchestrator'; + + export const Root = ({ children }: PropsWithChildren<{}>) => ( + + + }> + {/* ... */} + {/* highlight-add-start */} + + {/* highlight-add-end */} + + {/* ... */} + + {children} + + ); + ``` + +## For users + +### Using the Orchestrator plugin in Backstage + +The Orchestrator plugin enhances the Backstage with the execution of developer self-service flows. It provides a graphical editor to manage workflows, and a dashboard to monitor the execution of the workflows. + +#### Prerequisites + +- Your Backstage application is installed and running. +- You have installed the Orchestrator plugin. For the installation process, see [Installation](#installation). + +#### Procedure + +1. Open your Backstage application. +1. Click the **Workflows** tab from the left-side panel to navigate to the **Orchestrator** main page. +1. Inside the **Orchestrator** main page, you can see the list of workflows that are available in your Backstage application. + +## Limitations + +1. The plugin architecture relies on the SonataFlow Dev Mode image locally running in the host machine. 
This is currently needed to map local workflow definition files to the container. This limitation will be removed in the future. +1. The integration with Scaffolder component catalog only works if the new backend system from Backstage is used. diff --git a/plugins/orchestrator/app-config.janus-idp.yaml b/plugins/orchestrator/app-config.janus-idp.yaml new file mode 100644 index 0000000000..2d65f77522 --- /dev/null +++ b/plugins/orchestrator/app-config.janus-idp.yaml @@ -0,0 +1,14 @@ +dynamicPlugins: + frontend: + janus-idp.backstage-plugin-orchestrator: + appIcons: + - name: orchestratorIcon + module: OrchestratorPlugin + importName: OrchestratorIcon + dynamicRoutes: + - path: /orchestrator + importName: OrchestratorPage + module: OrchestratorPlugin + menuItem: + icon: orchestratorIcon + text: Orchestrator diff --git a/plugins/orchestrator/dev/index.tsx b/plugins/orchestrator/dev/index.tsx new file mode 100644 index 0000000000..85147812f9 --- /dev/null +++ b/plugins/orchestrator/dev/index.tsx @@ -0,0 +1,14 @@ +import React from 'react'; + +import { createDevApp } from '@backstage/dev-utils'; + +import { OrchestratorPage, orchestratorPlugin } from '../src'; + +createDevApp() + .registerPlugin(orchestratorPlugin) + .addPage({ + element: , + title: 'Root Page', + path: '/orchestrator', + }) + .render(); diff --git a/plugins/orchestrator/package.json b/plugins/orchestrator/package.json new file mode 100644 index 0000000000..da2764ffa6 --- /dev/null +++ b/plugins/orchestrator/package.json @@ -0,0 +1,118 @@ +{ + "name": "@janus-idp/backstage-plugin-orchestrator", + "version": "0.0.1", + "license": "Apache-2.0", + "main": "src/index.ts", + "types": "src/index.ts", + "publishConfig": { + "access": "public", + "main": "dist/index.esm.js", + "types": "dist/index.d.ts" + }, + "backstage": { + "role": "frontend-plugin" + }, + "homepage": "https://janus-idp.io/", + "repository": "github:janus-idp/backstage-plugins", + "bugs": "https://github.com/janus-idp/backstage-plugins/issues", + "keywords": [ + "backstage", + "plugin", + "orchestrator", + "workflows" + ], + "files": [ + "app-config.janus-idp.yaml", + "dist", + "dist-scalprum" + ], + "scripts": { + "start": "backstage-cli package start", + "build": "backstage-cli package build", + "export-dynamic": "janus-cli package export-dynamic-plugin && yarn export-workflow-editor-envelope:dynamic", + "export-workflow-editor-envelope": "rm -rf dist/workflow-editor-envelope/ && webpack --config workflow-editor-envelope/webpack.config.js", + "export-workflow-editor-envelope:dynamic": "yarn export-workflow-editor-envelope --env envelopeParentFolder=./dist-scalprum", + "export-workflow-editor-envelope:embedded": "yarn export-workflow-editor-envelope --env envelopeParentFolder=../../packages/app/public", + "tsc": "tsc", + "lint": "backstage-cli package lint", + "test": "backstage-cli package test --passWithNoTests --coverage", + "clean": "backstage-cli package clean", + "prepack": "backstage-cli package prepack", + "postpack": "backstage-cli package postpack" + }, + "dependencies": { + "@backstage/core-app-api": "^1.11.0", + "@backstage/core-components": "^0.13.6", + "@backstage/core-plugin-api": "^1.7.0", + "@backstage/errors": "^1.2.3", + "@backstage/plugin-catalog": "^1.15.1", + "@backstage/plugin-scaffolder-common": "^1.4.2", + "@backstage/plugin-scaffolder-react": "^1.5.1", + "@backstage/types": "^1.1.1", + "@janus-idp/backstage-plugin-orchestrator-common": "0.0.1", + "@kie-tools-core/editor": "^0.32.0", + "@kie-tools-core/envelope-bus": "^0.32.0", + 
"@kie-tools-core/keyboard-shortcuts": "^0.32.0", + "@kie-tools-core/notifications": "^0.32.0", + "@kie-tools-core/react-hooks": "^0.32.0", + "@kie-tools/serverless-workflow-combined-editor": "^0.32.0", + "@kie-tools/serverless-workflow-diagram-editor-assets": "^0.32.0", + "@kie-tools/serverless-workflow-diagram-editor-envelope": "^0.32.0", + "@kie-tools/serverless-workflow-language-service": "^0.32.0", + "@kie-tools/serverless-workflow-service-catalog": "^0.32.0", + "@kie-tools/serverless-workflow-text-editor": "^0.32.0", + "@material-ui/core": "^4.12.4", + "@material-ui/icons": "^4.11.3", + "@material-ui/lab": "^4.0.0-alpha.45", + "@monaco-editor/react": "^4.6.0", + "@mui/icons-material": "^5.15.3", + "@rjsf/core-v5": "npm:@rjsf/core@5.7.3", + "@rjsf/material-ui-v5": "npm:@rjsf/material-ui@5.7.3", + "@rjsf/utils": "5.7.3", + "@rjsf/validator-ajv8": "5.7.3", + "classnames": "^2.5.1", + "json-schema": "^0.4.0", + "moment": "^2.29.4", + "monaco-editor": "^0.39.0", + "react-hook-form": "^7.45.1", + "react-json-view": "^1.21.3", + "react-moment": "^1.1.3", + "react-use": "^17.4.0", + "vscode-languageserver-types": "^3.16.0" + }, + "devDependencies": { + "@babel/core": "^7.16.0", + "@babel/preset-env": "^7.16.0", + "@babel/preset-react": "^7.16.0", + "@backstage/cli": "0.23.0", + "@backstage/dev-utils": "1.0.22", + "@backstage/test-utils": "^1.4.4", + "@janus-idp/cli": "1.4.7", + "@storybook/react": "^7.5.3", + "@types/json-schema": "^7.0.12", + "css-loader": "^6.5.1", + "file-loader": "^5.0.2", + "filemanager-webpack-plugin": "^6.1.4", + "monaco-editor-webpack-plugin": "7.0.1", + "monaco-yaml": "^4.0.4", + "sass": "^1.54.5", + "sass-loader": "^10.3.1", + "source-map-loader": "^2.0.2", + "style-loader": "^1.0.0", + "terser-webpack-plugin": "^5.3.9", + "ts-loader": "^8.4.0", + "url-loader": "^3.0.0", + "webpack": "^5.70.0", + "webpack-cli": "^4.10.0" + }, + "peerDependencies": { + "react": "^16.13.1 || ^17.0.0", + "react-router-dom": "^6.3.0" + }, + "scalprum": { + "name": "janus-idp.backstage-plugin-orchestrator", + "exposedModules": { + "OrchestratorPlugin": "./src/index.ts" + } + } +} diff --git a/plugins/orchestrator/src/__fixtures__/fakeFeatureFlagsApi.ts b/plugins/orchestrator/src/__fixtures__/fakeFeatureFlagsApi.ts new file mode 100644 index 0000000000..d1ac985a9b --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeFeatureFlagsApi.ts @@ -0,0 +1,22 @@ +import { + FeatureFlag, + FeatureFlagsApi, + FeatureFlagsSaveOptions, +} from '@backstage/core-plugin-api'; + +export const createFakeFeatureFlagsApi = ( + activeFeatureFlags?: string[], +): FeatureFlagsApi => ({ + registerFlag: (_flag: FeatureFlag) => { + throw new Error('Function not implemented.'); + }, + getRegisteredFlags: (): FeatureFlag[] => { + throw new Error('Function not implemented.'); + }, + isActive: (name: string): boolean => { + return !!activeFeatureFlags?.includes(name); + }, + save: (_options: FeatureFlagsSaveOptions): void => { + throw new Error('Function not implemented.'); + }, +}); diff --git a/plugins/orchestrator/src/__fixtures__/fakeNodeInstances.ts b/plugins/orchestrator/src/__fixtures__/fakeNodeInstances.ts new file mode 100644 index 0000000000..abe211a562 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeNodeInstances.ts @@ -0,0 +1,112 @@ +import { NodeInstance } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeNodeInstances: NodeInstance[] = [ + { + id: '5e33c387-1235-41cd-a7af-6d9e11c4381d', + nodeId: '2', + definitionId: '_jbpm-unique-352', + type: 'EndNode', + 
name: 'End', + enter: new Date('2024-01-07T11:02:44.264Z'), + exit: new Date('2024-01-07T11:02:44.264Z'), + }, + { + id: 'b1602034-979e-49ce-99f6-b4222f799f6d', + nodeId: '6', + definitionId: '_jbpm-unique-356', + type: 'CompositeContextNode', + name: 'GreetPerson', + enter: new Date('2024-01-07T11:02:39.204Z'), + exit: new Date('2024-01-07T11:02:44.264Z'), + }, + { + id: '57d01c6a-7dc7-4c85-86f3-db46d148bd15', + nodeId: '12', + definitionId: '_jbpm-unique-361', + type: 'EndNode', + name: 'EmbeddedEnd', + enter: new Date('2024-01-07T11:02:44.263Z'), + exit: new Date('2024-01-07T11:02:44.263Z'), + }, + { + id: 'dbb08e15-13e7-47db-b8a9-0924cc288885', + nodeId: '10', + definitionId: '_jbpm-unique-359', + type: 'ActionNode', + name: 'Script', + enter: new Date('2024-01-07T11:02:44.262Z'), + exit: new Date('2024-01-07T11:02:44.263Z'), + }, + { + id: '4b5cf2ef-32e3-4720-952a-90158cb8259d', + nodeId: '9', + definitionId: '_jbpm-unique-358', + type: 'ActionNode', + name: 'greetFunction', + enter: new Date('2024-01-07T11:02:44.261Z'), + exit: new Date('2024-01-07T11:02:44.262Z'), + }, + { + id: '54b25b60-b03a-4c76-bd69-648ce73ab3cf', + nodeId: '11', + definitionId: '11', + type: 'TimerNode', + name: 'TimerNode_11', + enter: new Date('2024-01-07T11:02:39.205Z'), + exit: new Date('2024-01-07T11:02:44.26Z'), + }, + { + id: 'dac8b5a1-d918-4e01-b538-58ca440e9f05', + nodeId: '7', + definitionId: '_jbpm-unique-357', + type: 'StartNode', + name: 'EmbeddedStart', + enter: new Date('2024-01-07T11:02:39.204Z'), + exit: new Date('2024-01-07T11:02:39.205Z'), + }, + { + id: 'c52c291b-b9c8-4fa1-a586-a0ed9531feb0', + nodeId: '14', + definitionId: '_jbpm-unique-363', + type: 'Join', + name: 'Join-GreetPerson', + enter: new Date('2024-01-07T11:02:39.203Z'), + exit: new Date('2024-01-07T11:02:39.203Z'), + }, + { + id: 'f5991057-3f73-493b-9be2-14a5069db99c', + nodeId: '4', + definitionId: '_jbpm-unique-354', + type: 'ActionNode', + name: 'GreetInEnglish', + enter: new Date('2024-01-07T11:02:39.197Z'), + exit: new Date('2024-01-07T11:02:39.203Z'), + }, + { + id: 'd44679a4-ae8b-4481-b65b-2310da26af47', + nodeId: '13', + definitionId: '_jbpm-unique-362', + type: 'Join', + name: 'Join-GreetInEnglish', + enter: new Date('2024-01-07T11:02:39.196Z'), + exit: new Date('2024-01-07T11:02:39.197Z'), + }, + { + id: '31ee04b6-ec89-4d69-9e48-f520a353abc3', + nodeId: '3', + definitionId: '3', + type: 'Split', + name: 'ChooseOnLanguage', + enter: new Date('2024-01-07T11:02:39.167Z'), + exit: new Date('2024-01-07T11:02:39.195Z'), + }, + { + id: 'f571878c-c59f-4f59-8759-9be8bb51a68e', + nodeId: '1', + definitionId: '_jbpm-unique-351', + type: 'StartNode', + name: 'Start', + enter: new Date('2024-01-07T11:02:39.165Z'), + exit: new Date('2024-01-07T11:02:39.166Z'), + }, +]; diff --git a/plugins/orchestrator/src/__fixtures__/fakeProcessInstance.ts b/plugins/orchestrator/src/__fixtures__/fakeProcessInstance.ts new file mode 100644 index 0000000000..f9aee53423 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeProcessInstance.ts @@ -0,0 +1,105 @@ +import { + ProcessInstance, + ProcessInstanceState, + WorkflowCategory, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { fakeWorkflowOverviewList } from './fakeWorkflowOverviewList'; + +let id = 10; +const baseDate = new Date('2023-11-16T10:50:34.346Z'); +const HOUR = 60 * 60 * 1000; +const DAY = 24 * HOUR; + +export const fakeProcessInstance1: ProcessInstance = { + id: `12f767c1-9002-43af-9515-62a72d0eaf${id++}`, + processName: fakeWorkflowOverviewList[0].name, + 
processId: fakeWorkflowOverviewList[0].workflowId, + state: ProcessInstanceState.Error, + start: baseDate, + end: new Date(baseDate.getTime() + 13 * HOUR), + lastUpdate: new Date(baseDate.getTime() + DAY), + nodes: [], + endpoint: 'enpoint/foo', + serviceUrl: 'service/bar', + source: 'my-source', + category: WorkflowCategory.INFRASTRUCTURE, + description: 'test description 1', + variables: { + foo: 'bar', + workflowdata: { + workflowOptions: { + 'my-category': { + id: 'next-workflow-1', + name: 'Next Workflow One', + }, + 'my-secod-category': [ + { + id: 'next-workflow-20', + name: 'Next Workflow Twenty', + }, + { + id: 'next-workflow-21', + name: 'Next Workflow Twenty One', + }, + ], + }, + }, + }, +}; + +export const fakeProcessInstance2: ProcessInstance = { + id: `12f767c1-9002-43af-9515-62a72d0eaf${id++}`, + processName: fakeWorkflowOverviewList[1].name, + processId: fakeWorkflowOverviewList[1].workflowId, + state: ProcessInstanceState.Completed, + start: new Date(baseDate.getTime() + HOUR), + end: new Date(baseDate.getTime() + DAY), + lastUpdate: new Date(baseDate.getTime() + DAY), + nodes: [], + variables: {}, + endpoint: 'enpoint/foo', + serviceUrl: 'service/bar', + source: 'my-source', + category: WorkflowCategory.ASSESSMENT, + description: 'test description 2', +}; + +export const fakeProcessInstance3: ProcessInstance = { + id: `12f767c1-9002-43af-9515-62a72d0eaf${id++}`, + processName: fakeWorkflowOverviewList[2].name, + processId: fakeWorkflowOverviewList[2].workflowId, + state: ProcessInstanceState.Active, + start: new Date(baseDate.getTime() + 2 * HOUR), + lastUpdate: new Date(baseDate.getTime() + DAY), + nodes: [], + variables: {}, + endpoint: 'enpoint/foo', + serviceUrl: 'service/bar', + source: 'my-source', + category: WorkflowCategory.INFRASTRUCTURE, + description: 'test description 3', +}; + +export const fakeProcessInstance4: ProcessInstance = { + id: `12f767c1-9002-43af-9515-62a72d0eaf${id++}`, + processName: fakeWorkflowOverviewList[3].name, + processId: fakeWorkflowOverviewList[3].workflowId, + state: ProcessInstanceState.Suspended, + start: baseDate, + lastUpdate: new Date(baseDate.getTime() + 2 * DAY), + nodes: [], + variables: {}, + endpoint: 'enpoint/foo', + serviceUrl: 'service/bar', + source: 'my-source', + category: WorkflowCategory.INFRASTRUCTURE, + description: 'test description 4', +}; + +export const fakeProcessInstances = [ + fakeProcessInstance1, + fakeProcessInstance2, + fakeProcessInstance3, + fakeProcessInstance4, +]; diff --git a/plugins/orchestrator/src/__fixtures__/fakeSpecs.ts b/plugins/orchestrator/src/__fixtures__/fakeSpecs.ts new file mode 100644 index 0000000000..15ec482722 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeSpecs.ts @@ -0,0 +1,2842 @@ +import { WorkflowSpecFile } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeSpecs: WorkflowSpecFile[] = [ + { + path: '/var/tmp/orchestrator/workflows/specs/actions-openapi.json', + content: { + openapi: '3.0.1', + info: { + title: 'Workflow Actions for BS API', + description: 'Workflow Actions BS API', + version: '0.0.1', + }, + servers: [ + { + url: 'http://host.docker.internal:7007/api/orchestrator', + }, + ], + paths: { + '/actions/fetch:plain': { + post: { + operationId: 'fetch:plain', + description: + 'Downloads content and places it in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.', + requestBody: { + description: 'Input parameters for the action fetch:plain in BS', + required: true, + content: { + 
'application/json': { + schema: { + $ref: '#/components/schemas/fetch_plain', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:plain response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fetch:plain:file': { + post: { + operationId: 'fetch:plain:file', + description: + 'Downloads single file and places it in the workspace.', + requestBody: { + description: + 'Input parameters for the action fetch:plain:file in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fetch_plain_file', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:plain:file response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fetch:template': { + post: { + operationId: 'fetch:template', + description: + 'Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.', + requestBody: { + description: + 'Input parameters for the action fetch:template in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fetch_template', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:template response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gerrit': { + post: { + operationId: 'publish:gerrit', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Gerrit.', + requestBody: { + description: + 'Input parameters for the action publish:gerrit in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gerrit', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gerrit response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gerrit:review': { + post: { + operationId: 'publish:gerrit:review', + description: 'Creates a new Gerrit review.', + requestBody: { + description: + 'Input parameters for the action publish:gerrit:review in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gerrit_review', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gerrit:review response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:github': { + post: { + operationId: 'publish:github', + description: + 'Initializes a git repository of contents in workspace and publishes it to GitHub.', + requestBody: { + description: + 'Input parameters for the action publish:github in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_github', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:github response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:github:pull-request': { + post: { + operationId: 'publish:github:pull-request', + requestBody: { + description: + 'Input parameters for the action publish:github:pull-request in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: 
'#/components/schemas/publish_github_pull-request', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:github:pull-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gitlab': { + post: { + operationId: 'publish:gitlab', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to GitLab.', + requestBody: { + description: + 'Input parameters for the action publish:gitlab in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gitlab', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gitlab response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gitlab:merge-request': { + post: { + operationId: 'publish:gitlab:merge-request', + requestBody: { + description: + 'Input parameters for the action publish:gitlab:merge-request in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gitlab_merge-request', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gitlab:merge-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucket': { + post: { + operationId: 'publish:bitbucket', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucket in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucket', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:bitbucket response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketCloud': { + post: { + operationId: 'publish:bitbucketCloud', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket Cloud.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketCloud in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketCloud', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:bitbucketCloud response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketServer': { + post: { + operationId: 'publish:bitbucketServer', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket Server.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketServer in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketServer', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:bitbucketServer response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketServer:pull-request': { + post: { + operationId: 'publish:bitbucketServer:pull-request', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketServer:pull-request in BS', + required: true, + content: { + 
'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketServer_pull-request', + }, + }, + }, + }, + responses: { + default: { + description: + 'Action publish:bitbucketServer:pull-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:azure': { + post: { + operationId: 'publish:azure', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Azure.', + requestBody: { + description: + 'Input parameters for the action publish:azure in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_azure', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:azure response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/debug:log': { + post: { + operationId: 'debug:log', + description: + 'Writes a message into the log or lists all files in the workspace.', + requestBody: { + description: 'Input parameters for the action debug:log in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/debug_log', + }, + }, + }, + }, + responses: { + default: { + description: 'Action debug:log response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/debug:wait': { + post: { + operationId: 'debug:wait', + description: 'Waits for a certain period of time.', + requestBody: { + description: 'Input parameters for the action debug:wait in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/debug_wait', + }, + }, + }, + }, + responses: { + default: { + description: 'Action debug:wait response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:register': { + post: { + operationId: 'catalog:register', + description: + 'Registers entities from a catalog descriptor file in the workspace into the software catalog.', + requestBody: { + description: + 'Input parameters for the action catalog:register in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_register', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:register response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:fetch': { + post: { + operationId: 'catalog:fetch', + description: + 'Returns entity or entities from the catalog by entity reference(s)', + requestBody: { + description: + 'Input parameters for the action catalog:fetch in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_fetch', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:fetch response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:write': { + post: { + operationId: 'catalog:write', + description: 'Writes the catalog-info.yaml for your template', + requestBody: { + description: + 'Input parameters for the action catalog:write in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_write', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:write response', + 
content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fs:delete': { + post: { + operationId: 'fs:delete', + description: 'Deletes files and directories from the workspace', + requestBody: { + description: 'Input parameters for the action fs:delete in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fs_delete', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fs:delete response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fs:rename': { + post: { + operationId: 'fs:rename', + description: 'Renames files and directories within the workspace', + requestBody: { + description: 'Input parameters for the action fs:rename in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fs_rename', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fs:rename response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:actions:dispatch': { + post: { + operationId: 'github:actions:dispatch', + description: + 'Dispatches a GitHub Action workflow for a given branch or tag', + requestBody: { + description: + 'Input parameters for the action github:actions:dispatch in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_actions_dispatch', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:actions:dispatch response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:webhook': { + post: { + operationId: 'github:webhook', + description: 'Creates webhook for a repository on GitHub.', + requestBody: { + description: + 'Input parameters for the action github:webhook in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_webhook', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:webhook response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:issues:label': { + post: { + operationId: 'github:issues:label', + description: 'Adds labels to a pull request or issue on GitHub.', + requestBody: { + description: + 'Input parameters for the action github:issues:label in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_issues_label', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:issues:label response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:repo:create': { + post: { + operationId: 'github:repo:create', + description: 'Creates a GitHub repository.', + requestBody: { + description: + 'Input parameters for the action github:repo:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_repo_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:repo:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:repo:push': { + post: { + operationId: 'github:repo:push', + description: + 'Initializes a git repository of contents in workspace and 
publishes it to GitHub.', + requestBody: { + description: + 'Input parameters for the action github:repo:push in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_repo_push', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:repo:push response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:environment:create': { + post: { + operationId: 'github:environment:create', + description: 'Creates Deployment Environments', + requestBody: { + description: + 'Input parameters for the action github:environment:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_environment_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:environment:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:deployKey:create': { + post: { + operationId: 'github:deployKey:create', + description: 'Creates and stores Deploy Keys', + requestBody: { + description: + 'Input parameters for the action github:deployKey:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_deployKey_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:deployKey:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/confluence:transform:markdown': { + post: { + operationId: 'confluence:transform:markdown', + requestBody: { + description: + 'Input parameters for the action confluence:transform:markdown in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/confluence_transform_markdown', + }, + }, + }, + }, + responses: { + default: { + description: 'Action confluence:transform:markdown response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + }, + components: { + schemas: { + fetch_plain: { + type: 'object', + required: ['url'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the directory tree to fetch', + type: 'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the contents to.', + type: 'string', + }, + }, + }, + fetch_plain_file: { + type: 'object', + required: ['url', 'targetPath'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the single file to fetch.', + type: 'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the file as.', + type: 'string', + }, + }, + }, + fetch_template: { + type: 'object', + required: ['url'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the directory tree to fetch', + type: 'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the contents to. 
Defaults to the working directory root.', + type: 'string', + }, + values: { + title: 'Template Values', + description: 'Values to pass on to the templating engine', + type: 'object', + }, + copyWithoutRender: { + title: '[Deprecated] Copy Without Render', + description: + 'An array of glob patterns. Any files or directories which match are copied without being processed as templates.', + type: 'array', + items: { + type: 'string', + }, + }, + copyWithoutTemplating: { + title: 'Copy Without Templating', + description: + 'An array of glob patterns. Contents of matched files or directories are copied without being processed, but paths are subject to rendering.', + type: 'array', + items: { + type: 'string', + }, + }, + cookiecutterCompat: { + title: 'Cookiecutter compatibility mode', + description: + 'Enable features to maximise compatibility with templates built for fetch:cookiecutter', + type: 'boolean', + }, + templateFileExtension: { + title: 'Template File Extension', + description: + 'If set, only files with the given extension will be templated. If set to `true`, the default extension `.njk` is used.', + type: 'boolean', + }, + replace: { + title: 'Replace files', + description: + 'If set, replace files in targetPath instead of skipping existing ones.', + type: 'boolean', + }, + }, + }, + publish_gerrit: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + type: 'string', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + }, + }, + }, + publish_gerrit_review: { + type: 'object', + required: ['repoUrl', 'gitCommitMessage'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + branch: { + title: 'Repository branch', + type: 'string', + description: + 'Branch of the repository the review will be created on', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory containing the repository', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: 'Sets the commit message on the repository.', + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + }, + }, + publish_github: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + homepage: { + title: 'Repository Homepage', + type: 'string', + }, + access: { + title: 'Repository Access', + description: + "Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 'org/team-name'", + type: 'string', + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + dismissStaleReviews: { + title: 'Dismiss Stale Reviews', + description: + 'New reviewable commits pushed to a matching branch will dismiss pull request review approvals.', + type: 'boolean', + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. 
The default value is 'master'", + }, + protectDefaultBranch: { + title: 'Protect Default Branch', + type: 'boolean', + description: + "Protect the default branch after creating the repository. The default value is 'true'", + }, + protectEnforceAdmins: { + title: 'Enforce Admins On Protected Branches', + type: 'boolean', + description: + "Enforce admins to adhere to default branch protection. The default value is 'true'", + }, + deleteBranchOnMerge: { + title: 'Delete Branch On Merge', + type: 'boolean', + description: + "Delete the branch after merging the PR. The default value is 'false'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + allowMergeCommit: { + title: 'Allow Merge Commits', + type: 'boolean', + description: "Allow merge commits. The default value is 'true'", + }, + allowSquashMerge: { + title: 'Allow Squash Merges', + type: 'boolean', + description: "Allow squash merges. The default value is 'true'", + }, + squashMergeCommitTitle: { + title: 'Default squash merge commit title', + enum: ['PR_TITLE', 'COMMIT_OR_PR_TITLE'], + description: + "Sets the default value for a squash merge commit title. The default value is 'COMMIT_OR_PR_TITLE'", + }, + squashMergeCommitMessage: { + title: 'Default squash merge commit message', + enum: ['PR_BODY', 'COMMIT_MESSAGES', 'BLANK'], + description: + "Sets the default value for a squash merge commit message. The default value is 'COMMIT_MESSAGES'", + }, + allowRebaseMerge: { + title: 'Allow Rebase Merges', + type: 'boolean', + description: "Allow rebase merges. The default value is 'true'", + }, + allowAutoMerge: { + title: 'Allow Auto Merges', + type: 'boolean', + description: + "Allow individual PRs to merge automatically when all merge requirements are met. The default value is 'false'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + collaborators: { + title: 'Collaborators', + description: + 'Provide additional users or teams with permissions', + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: ['access'], + properties: { + access: { + type: 'string', + description: 'The type of access for the user', + }, + user: { + type: 'string', + description: + 'The name of the user that will be added as a collaborator', + }, + team: { + type: 'string', + description: + 'The name of the team that will be added as a collaborator', + }, + }, + }, + }, + hasProjects: { + title: 'Enable projects', + type: 'boolean', + description: + "Enable projects for the repository. The default value is 'true' unless the organization has disabled repository projects", + }, + hasWiki: { + title: 'Enable the wiki', + type: 'boolean', + description: + "Enable the wiki for the repository. The default value is 'true'", + }, + hasIssues: { + title: 'Enable issues', + type: 'boolean', + description: + "Enable issues for the repository. 
The default value is 'true'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + topics: { + title: 'Topics', + type: 'array', + items: { + type: 'string', + }, + }, + repoVariables: { + title: 'Repository Variables', + description: 'Variables attached to the repository', + type: 'object', + }, + secrets: { + title: 'Repository Secrets', + description: 'Secrets attached to the repository', + type: 'object', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + 'publish_github_pull-request': { + required: ['repoUrl', 'title', 'description', 'branchName'], + type: 'object', + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the repository name and 'owner' is an organization or username", + type: 'string', + }, + branchName: { + type: 'string', + title: 'Branch Name', + description: 'The name for the branch', + }, + targetBranchName: { + type: 'string', + title: 'Target Branch Name', + description: 'The target branch name of the merge request', + }, + title: { + type: 'string', + title: 'Pull Request Name', + description: 'The name for the pull request', + }, + description: { + type: 'string', + title: 'Pull Request Description', + description: 'The description of the pull request', + }, + draft: { + type: 'boolean', + title: 'Create as Draft', + description: 'Create a draft pull request', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory to copy changes from', + }, + targetPath: { + type: 'string', + title: 'Repository Subdirectory', + description: 'Subdirectory of repository to apply changes to', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + reviewers: { + title: 'Pull Request Reviewers', + type: 'array', + items: { + type: 'string', + }, + description: + 'The users that will be added as reviewers to the pull request', + }, + teamReviewers: { + title: 'Pull Request Team Reviewers', + type: 'array', + items: { + type: 'string', + }, + description: + 'The teams that will be added as reviewers to the pull request', + }, + commitMessage: { + type: 'string', + title: 'Commit Message', + description: 'The commit message for the pull request commit', + }, + }, + }, + publish_gitlab: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + description: + "Accepts the format 'gitlab.com?repo=project_name&owner=group_name' where 'project_name' is the repository name and 'group_name' is a group or username", + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitLab', + }, + setUserAsOwner: { + title: 'Set User As Owner', + type: 'boolean', + description: + 'Set the token user as owner of the newly created repository. Requires a token authorized to do the edit in the integration configuration for the matching host', + }, + topics: { + title: 'Topic labels', + description: 'Topic labels to apply on the repository.', + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + 'publish_gitlab_merge-request': { + required: ['repoUrl', 'branchName'], + type: 'object', + properties: { + repoUrl: { + type: 'string', + title: 'Repository Location', + description: + "Accepts the format 'gitlab.com?repo=project_name&owner=group_name' where 'project_name' is the repository name and 'group_name' is a group or username", + }, + projectid: { + type: 'string', + title: 'projectid', + description: 'Project ID/Name(slug) of the Gitlab Project', + }, + title: { + type: 'string', + title: 'Merge Request Name', + description: 'The name for the merge request', + }, + description: { + type: 'string', + title: 'Merge Request Description', + description: 'The description of the merge request', + }, + branchName: { + type: 'string', + title: 'Source Branch Name', + description: 'The source branch name of the merge request', + }, + targetBranchName: { + type: 'string', + title: 'Target Branch Name', + description: 'The target branch name of the merge request', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory to copy changes from', + }, + targetPath: { + type: 'string', + title: 'Repository Subdirectory', + description: 'Subdirectory of repository to apply changes to', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitLab', + }, + commitAction: { + title: 'Commit action', + type: 'string', + enum: ['create', 'update', 'delete'], + description: + 'The action to be used for git commit. Defaults to create.', + }, + removeSourceBranch: { + title: 'Delete source branch', + type: 'boolean', + description: + 'Option to delete source branch once the MR has been merged. Default: false', + }, + assignee: { + title: 'Merge Request Assignee', + type: 'string', + description: 'User this merge request will be assigned to', + }, + }, + }, + publish_bitbucket: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. 
If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + enableLFS: { + title: 'Enable LFS?', + description: + 'Enable LFS for the repository. Only available for hosted Bitbucket.', + type: 'boolean', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to BitBucket', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + }, + }, + publish_bitbucketCloud: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Cloud', + }, + }, + }, + publish_bitbucketServer: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + enableLFS: { + title: 'Enable LFS?', + description: 'Enable LFS for the repository.', + type: 'boolean', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Server', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Author Name', + type: 'string', + description: + "Sets the author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Author Email', + type: 'string', + description: 'Sets the author email for the commit.', + }, + }, + }, + 'publish_bitbucketServer_pull-request': { + type: 'object', + required: ['repoUrl', 'title', 'sourceBranch'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + title: { + title: 'Pull Request title', + type: 'string', + description: 'The title for the pull request', + }, + description: { + title: 'Pull Request Description', + type: 'string', + description: 'The description of the pull request', + }, + targetBranch: { + title: 'Target Branch', + type: 'string', + description: + "Branch of repository to apply changes to. The default value is 'master'", + }, + sourceBranch: { + title: 'Source Branch', + type: 'string', + description: 'Branch of repository to copy changes from', + }, + token: { + title: 'Authorization Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Server', + }, + }, + }, + publish_azure: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to Azure', + }, + }, + }, + debug_log: { + type: 'object', + properties: { + message: { + title: 'Message to output.', + type: 'string', + }, + listWorkspace: { + title: 'List all files in the workspace, if true.', + type: 'boolean', + }, + extra: { + title: 'Extra info', + }, + }, + }, + debug_wait: { + type: 'object', + properties: { + minutes: { + title: 'Waiting period in minutes.', + type: 'number', + }, + seconds: { + title: 'Waiting period in seconds.', + type: 'number', + }, + milliseconds: { + title: 'Waiting period in milliseconds.', + type: 'number', + }, + }, + }, + catalog_register: { + oneOf: [ + { + type: 'object', + required: ['catalogInfoUrl'], + properties: { + catalogInfoUrl: { + title: 'Catalog Info URL', + description: + 'An absolute URL pointing to the catalog info file location', + type: 'string', + }, + optional: { + title: 'Optional', + description: + 'Permit the registered location to optionally exist. 
Default: false', + type: 'boolean', + }, + }, + }, + { + type: 'object', + required: ['repoContentsUrl'], + properties: { + repoContentsUrl: { + title: 'Repository Contents URL', + description: + 'An absolute URL pointing to the root of a repository directory tree', + type: 'string', + }, + catalogInfoPath: { + title: 'Fetch URL', + description: + 'A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml', + type: 'string', + }, + optional: { + title: 'Optional', + description: + 'Permit the registered location to optionally exist. Default: false', + type: 'boolean', + }, + }, + }, + ], + }, + catalog_fetch: { + type: 'object', + properties: { + entityRef: { + type: 'string', + description: 'Entity reference of the entity to get', + }, + entityRefs: { + type: 'array', + items: { + type: 'string', + }, + description: 'Entity references of the entities to get', + }, + optional: { + type: 'boolean', + description: + 'Allow the entity or entities to optionally exist. Default: false', + }, + defaultKind: { + type: 'string', + description: 'The default kind', + }, + defaultNamespace: { + type: 'string', + description: 'The default namespace', + }, + }, + additionalProperties: false, + }, + catalog_write: { + type: 'object', + properties: { + filePath: { + type: 'string', + description: 'Defaults to catalog-info.yaml', + }, + entity: { + type: 'object', + additionalProperties: {}, + description: + 'You can provide the same values used in the Entity schema.', + }, + }, + required: ['entity'], + additionalProperties: false, + }, + fs_delete: { + required: ['files'], + type: 'object', + properties: { + files: { + title: 'Files', + description: + 'A list of files and directories that will be deleted', + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + fs_rename: { + required: ['files'], + type: 'object', + properties: { + files: { + title: 'Files', + description: + 'A list of file and directory names that will be renamed', + type: 'array', + items: { + type: 'object', + required: ['from', 'to'], + properties: { + from: { + type: 'string', + title: 'The source location of the file to be renamed', + }, + to: { + type: 'string', + title: 'The destination of the new file', + }, + overwrite: { + type: 'boolean', + title: + 'Overwrite existing file or directory, default is false', + }, + }, + }, + }, + }, + }, + github_actions_dispatch: { + type: 'object', + required: ['repoUrl', 'workflowId', 'branchOrTagName'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + workflowId: { + title: 'Workflow ID', + description: 'The GitHub Action Workflow filename', + type: 'string', + }, + branchOrTagName: { + title: 'Branch or Tag name', + description: + 'The git branch or tag name used to dispatch the workflow', + type: 'string', + }, + workflowInputs: { + title: 'Workflow Inputs', + description: + 'Inputs keys and values to send to GitHub Action configured on the workflow file. The maximum number of properties is 10. 
', + type: 'object', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_webhook: { + type: 'object', + required: ['repoUrl', 'webhookUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + webhookUrl: { + title: 'Webhook URL', + description: 'The URL to which the payloads will be delivered', + type: 'string', + }, + webhookSecret: { + title: 'Webhook Secret', + description: + 'Webhook secret value. The default can be provided internally in action creation', + type: 'string', + }, + events: { + title: 'Triggering Events', + description: + 'Determines what events the hook is triggered for. Default: push', + type: 'string', + }, + active: { + title: 'Active', + type: 'boolean', + description: + 'Determines if notifications are sent when the webhook is triggered. Default: true', + }, + contentType: { + title: 'Content Type', + type: 'string', + enum: ['form', 'json'], + description: + "The media type used to serialize the payloads. The default is 'form'", + }, + insecureSsl: { + title: 'Insecure SSL', + type: 'boolean', + description: + "Determines whether the SSL certificate of the host for url will be verified when delivering payloads. Default 'false'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_issues_label: { + type: 'object', + required: ['repoUrl', 'number', 'labels'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the repository name and 'owner' is an organization or username", + type: 'string', + }, + number: { + title: 'Pull Request or issue number', + description: + 'The pull request or issue number to add labels to', + type: 'number', + }, + labels: { + title: 'Labels', + description: 'The labels to add to the pull request or issue', + type: 'array', + items: { + type: 'string', + }, + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_repo_create: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + homepage: { + title: 'Repository Homepage', + type: 'string', + }, + access: { + title: 'Repository Access', + description: + "Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 
'org/team-name'", + type: 'string', + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + deleteBranchOnMerge: { + title: 'Delete Branch On Merge', + type: 'boolean', + description: + "Delete the branch after merging the PR. The default value is 'false'", + }, + allowMergeCommit: { + title: 'Allow Merge Commits', + type: 'boolean', + description: "Allow merge commits. The default value is 'true'", + }, + allowSquashMerge: { + title: 'Allow Squash Merges', + type: 'boolean', + description: "Allow squash merges. The default value is 'true'", + }, + squashMergeCommitTitle: { + title: 'Default squash merge commit title', + enum: ['PR_TITLE', 'COMMIT_OR_PR_TITLE'], + description: + "Sets the default value for a squash merge commit title. The default value is 'COMMIT_OR_PR_TITLE'", + }, + squashMergeCommitMessage: { + title: 'Default squash merge commit message', + enum: ['PR_BODY', 'COMMIT_MESSAGES', 'BLANK'], + description: + "Sets the default value for a squash merge commit message. The default value is 'COMMIT_MESSAGES'", + }, + allowRebaseMerge: { + title: 'Allow Rebase Merges', + type: 'boolean', + description: "Allow rebase merges. The default value is 'true'", + }, + allowAutoMerge: { + title: 'Allow Auto Merges', + type: 'boolean', + description: + "Allow individual PRs to merge automatically when all merge requirements are met. 
The default value is 'false'", + }, + collaborators: { + title: 'Collaborators', + description: + 'Provide additional users or teams with permissions', + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: ['access'], + properties: { + access: { + type: 'string', + description: 'The type of access for the user', + }, + user: { + type: 'string', + description: + 'The name of the user that will be added as a collaborator', + }, + team: { + type: 'string', + description: + 'The name of the team that will be added as a collaborator', + }, + }, + }, + }, + hasProjects: { + title: 'Enable projects', + type: 'boolean', + description: + "Enable projects for the repository. The default value is 'true' unless the organization has disabled repository projects", + }, + hasWiki: { + title: 'Enable the wiki', + type: 'boolean', + description: + "Enable the wiki for the repository. The default value is 'true'", + }, + hasIssues: { + title: 'Enable issues', + type: 'boolean', + description: + "Enable issues for the repository. The default value is 'true'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + topics: { + title: 'Topics', + type: 'array', + items: { + type: 'string', + }, + }, + repoVariables: { + title: 'Repository Variables', + description: 'Variables attached to the repository', + type: 'object', + }, + secrets: { + title: 'Repository Secrets', + description: 'Secrets attached to the repository', + type: 'object', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + github_repo_push: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + dismissStaleReviews: { + title: 'Dismiss Stale Reviews', + description: + 'New reviewable commits pushed to a matching branch will dismiss pull request review approvals.', + type: 'boolean', + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. 
User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + protectDefaultBranch: { + title: 'Protect Default Branch', + type: 'boolean', + description: + "Protect the default branch after creating the repository. The default value is 'true'", + }, + protectEnforceAdmins: { + title: 'Enforce Admins On Protected Branches', + type: 'boolean', + description: + "Enforce admins to adhere to default branch protection. The default value is 'true'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + github_environment_create: { + type: 'object', + required: ['repoUrl', 'name'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + name: { + title: 'Environment Name', + description: 'Name of the deployment environment to create', + type: 'string', + }, + deploymentBranchPolicy: { + title: 'Deployment Branch Policy', + description: + 'The type of deployment branch policy for this environment. To allow all branches to deploy, set to null.', + type: 'object', + required: ['protected_branches', 'custom_branch_policies'], + properties: { + protected_branches: { + title: 'Protected Branches', + description: + 'Whether only branches with branch protection rules can deploy to this environment. 
If protected_branches is true, custom_branch_policies must be false; if protected_branches is false, custom_branch_policies must be true.', + type: 'boolean', + }, + custom_branch_policies: { + title: 'Custom Branch Policies', + description: + 'Whether only branches that match the specified name patterns can deploy to this environment. If custom_branch_policies is true, protected_branches must be false; if custom_branch_policies is false, protected_branches must be true.', + type: 'boolean', + }, + }, + }, + customBranchPolicyNames: { + title: 'Custom Branch Policy Name', + description: + 'The name pattern that branches must match in order to deploy to the environment.\n\n Wildcard characters will not match /. For example, to match branches that begin with release/ and contain an additional single slash, use release/*/*. For more information about pattern matching syntax, see the Ruby File.fnmatch documentation.', + type: 'array', + items: { + type: 'string', + }, + }, + environmentVariables: { + title: 'Environment Variables', + description: + 'Environment variables attached to the deployment environment', + type: 'object', + }, + secrets: { + title: 'Deployment Secrets', + description: 'Secrets attached to the deployment environment', + type: 'object', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + }, + }, + github_deployKey_create: { + type: 'object', + required: ['repoUrl', 'publicKey', 'privateKey', 'deployKeyName'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + publicKey: { + title: 'SSH Public Key', + description: + "Generated from ssh-keygen. Begins with 'ssh-rsa', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'sk-ecdsa-sha2-nistp256@openssh.com', or 'sk-ssh-ed25519@openssh.com'.", + type: 'string', + }, + privateKey: { + title: 'SSH Private Key', + description: 'SSH Private Key generated from ssh-keygen', + type: 'string', + }, + deployKeyName: { + title: 'Deploy Key Name', + description: 'Name of the Deploy Key', + type: 'string', + }, + privateKeySecretName: { + title: 'Private Key GitHub Secret Name', + description: + "Name of the GitHub Secret to store the private key related to the Deploy Key. Defaults to: 'KEY_NAME_PRIVATE_KEY' where 'KEY_NAME' is the name of the Deploy Key", + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + }, + }, + confluence_transform_markdown: { + properties: { + confluenceUrls: { + type: 'array', + title: 'Confluence URL', + description: + 'Paste your Confluence url. 
Ensure it follows this format: https://{confluence+base+url}/display/{spacekey}/{page+title} or https://{confluence+base+url}/spaces/{spacekey}/pages/1234567/{page+title} for Confluence Cloud', + items: { + type: 'string', + default: 'Confluence URL', + }, + }, + repoUrl: { + type: 'string', + title: 'GitHub Repo Url', + description: + 'mkdocs.yml file location inside the github repo you want to store the document', + }, + }, + }, + }, + }, + }, + }, + { + path: '/var/tmp/orchestrator/workflows/specs/jira-openapi.json', + content: { + openapi: '3.0.1', + info: { + title: 'JIRA Actions for BS API', + description: 'JIRA Actions BS API', + version: '0.0.1', + }, + servers: [ + { + url: 'http://localhost:8080', + }, + ], + paths: { + '/rest/api/2/issue': { + post: { + tags: ['Issues'], + summary: 'Create issue', + description: + "Creates an issue or, where the option to create subtasks is enabled in Jira, a subtask. A transition may be applied, to move the issue or subtask to a workflow step other than the default start step, and issue properties set.\n\nThe content of the issue or subtask is defined using `update` and `fields`. The fields that can be set in the issue or subtask are determined using the [ Get create issue metadata](#api-rest-api-3-issue-createmeta-get). These are the same fields that appear on the issue's create screen. Note that the `description`, `environment`, and any `textarea` type custom fields (multi-line text fields) take Atlassian Document Format content. Single line custom fields (`textfield`) accept a string and don't handle Atlassian Document Format content.\n\nCreating a subtask differs from creating an issue as follows:\n\n * `issueType` must be set to a subtask issue type (use [ Get create issue metadata](#api-rest-api-3-issue-createmeta-get) to find subtask issue types).\n * `parent` must contain the ID or key of the parent issue.\n\nIn a next-gen project any issue may be made a child providing that the parent and child are members of the same project.\n\n**[Permissions](#permissions) required:** *Browse projects* and *Create issues* [project permissions](https://confluence.atlassian.com/x/yodKLg) for the project in which the issue or subtask is created.", + operationId: 'createIssue', + parameters: [ + { + name: 'updateHistory', + in: 'query', + description: + "Whether the project in which the issue is created is added to the user's **Recently viewed** project list, as shown under **Projects** in Jira. When provided, the issue type and request type are added to the user's history for a project. 
These values are then used to provide defaults on the issue create screen.", + schema: { + type: 'boolean', + default: false, + }, + }, + ], + requestBody: { + description: 'Input parameters for the action createIssue in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/IssueUpdateDetails', + }, + }, + }, + }, + responses: { + default: { + description: 'Create Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + }, + '/rest/api/2/issue/{issueIdOrKey}/transitions': { + post: { + tags: ['Issues'], + summary: 'Transition issue', + description: + 'Performs an issue transition and, if the transition has a screen, updates the fields from the transition screen.', + operationId: 'transitionIssue', + parameters: [ + { + name: 'issueIdOrKey', + in: 'path', + description: 'The ID or key of the issue.', + required: true, + schema: { + type: 'string', + }, + }, + ], + requestBody: { + description: + 'Input parameters for the action transitionIssue in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/TransitionIssue', + }, + }, + }, + }, + responses: { + default: { + description: 'Transition Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + get: { + tags: ['Issues'], + summary: 'Get issue transitions', + description: 'Get issue transitions', + operationId: 'getIssueTransitions', + parameters: [ + { + name: 'issueIdOrKey', + in: 'path', + description: 'The ID or key of the issue.', + required: true, + schema: { + type: 'string', + }, + }, + ], + responses: { + default: { + description: 'Transition Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + }, + }, + components: { + securitySchemes: { + basicAuth: { + type: 'http', + scheme: 'basic', + }, + bearerAuth: { + type: 'http', + scheme: 'bearer', + }, + }, + schemas: { + ErrorCollection: { + type: 'object', + }, + IssueUpdateDetails: { + type: 'object', + properties: { + fields: { + type: 'object', + }, + }, + }, + TransitionIssue: { + type: 'object', + properties: { + transition: { + type: 'object', + properties: { + id: { + type: 'string', + }, + }, + }, + update: { + type: 'object', + properties: { + comment: { + type: 'array', + items: { + type: 'object', + properties: { + add: { + type: 'object', + properties: { + body: { + type: 'string', + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + CreatedIssue: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The ID of the created issue or subtask.', + readOnly: true, + }, + key: { + type: 'string', + description: 'The key of the created issue or subtask.', + readOnly: true, + }, + self: { + type: 'string', + description: 'The URL of the created issue or subtask.', + readOnly: true, + }, + }, + }, + }, + }, + }, + }, +]; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaDifferentTypes.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaDifferentTypes.ts new file mode 100644 index 0000000000..f845f8fb0b --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaDifferentTypes.ts @@ -0,0 +1,169 @@ +import { 
WorkflowDataInputSchemaResponse } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeDataInputSchemaDifferentTypes: WorkflowDataInputSchemaResponse = + { + workflowItem: { + uri: 'yamlgreet.sw.yaml', + definition: { + id: 'yamlgreet', + version: '1.0', + specVersion: '0.8', + name: 'Greeting workflow', + description: 'YAML based greeting workflow', + dataInputSchema: 'schemas/yamlgreet__main_schema.json', + start: 'ChooseOnLanguage', + functions: [ + { + name: 'greetFunction', + type: 'custom', + operation: 'sysout', + }, + ], + states: [ + { + name: 'ChooseOnLanguage', + type: 'switch', + dataConditions: [ + { + condition: '${ .language == "English" }', + transition: 'GreetInEnglish', + }, + { + condition: '${ .language == "Spanish" }', + transition: 'GreetInSpanish', + }, + ], + defaultCondition: { + transition: 'GreetInEnglish', + }, + }, + { + name: 'GreetInEnglish', + type: 'inject', + data: { + greeting: 'Hello from YAML Workflow, ', + }, + transition: 'GreetPerson', + }, + { + name: 'GreetInSpanish', + type: 'inject', + data: { + greeting: 'Saludos desde YAML Workflow, ', + }, + transition: 'GreetPerson', + }, + { + name: 'GreetPerson', + type: 'operation', + actions: [ + { + name: 'greetAction', + functionRef: { + refName: 'greetFunction', + arguments: { + message: '.greeting+.name', + }, + }, + }, + ], + end: { + terminate: true, + }, + }, + ], + }, + }, + schemas: [ + { + title: 'Boolean field', + type: 'object', + properties: { + default: { + type: 'boolean', + title: 'checkbox (default)', + description: 'This is the checkbox-description', + }, + }, + }, + { + title: 'String formats', + type: 'object', + properties: { + email: { + type: 'string', + format: 'email', + }, + uri: { + type: 'string', + format: 'uri', + }, + }, + }, + { + title: 'Select', + type: 'object', + properties: { + select: { + title: 'Select widget with options', + type: 'string', + enum: ['pizza', 'pasta', 'canaloni', 'ravioli'], + }, + }, + }, + { + title: 'Date and time widgets', + type: 'object', + properties: { + datetime: { + type: 'string', + format: 'date-time', + }, + date: { + type: 'string', + format: 'date', + }, + time: { + type: 'string', + format: 'time', + }, + }, + }, + { + title: 'Array', + type: 'object', + required: ['title'], + properties: { + title: { + type: 'string', + title: 'Task list title', + }, + tasks: { + type: 'array', + title: 'Tasks', + items: { + type: 'object', + required: ['title'], + properties: { + title: { + type: 'string', + title: 'Title', + description: 'A sample title', + }, + details: { + type: 'string', + title: 'Task details', + description: 'Enter the task details', + }, + done: { + type: 'boolean', + title: 'Done?', + default: false, + }, + }, + }, + }, + }, + }, + ], + }; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponse.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponse.ts new file mode 100644 index 0000000000..0ed142da63 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponse.ts @@ -0,0 +1,94 @@ +import { WorkflowDataInputSchemaResponse } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeDataInputSchemaReponse: WorkflowDataInputSchemaResponse = { + workflowItem: { + uri: 'yamlgreet.sw.yaml', + definition: { + id: 'yamlgreet', + version: '1.0', + specVersion: '0.8', + name: 'Greeting workflow', + description: 'YAML based greeting workflow', + dataInputSchema: 'schemas/yamlgreet__main_schema.json', + start: 
'ChooseOnLanguage', + functions: [ + { + name: 'greetFunction', + type: 'custom', + operation: 'sysout', + }, + ], + states: [ + { + name: 'ChooseOnLanguage', + type: 'switch', + dataConditions: [ + { + condition: '${ .language == "English" }', + transition: 'GreetInEnglish', + }, + { + condition: '${ .language == "Spanish" }', + transition: 'GreetInSpanish', + }, + ], + defaultCondition: { + transition: 'GreetInEnglish', + }, + }, + { + name: 'GreetInEnglish', + type: 'inject', + data: { + greeting: 'Hello from YAML Workflow, ', + }, + transition: 'GreetPerson', + }, + { + name: 'GreetInSpanish', + type: 'inject', + data: { + greeting: 'Saludos desde YAML Workflow, ', + }, + transition: 'GreetPerson', + }, + { + name: 'GreetPerson', + type: 'operation', + actions: [ + { + name: 'greetAction', + functionRef: { + refName: 'greetFunction', + arguments: { + message: '.greeting+.name', + }, + }, + }, + ], + end: { + terminate: true, + }, + }, + ], + }, + }, + schemas: [ + { + $id: 'classpath:/schemas/yamlgreet__sub_schema__Additional_input_data.json', + title: 'yamlgreet: Additional input data', + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + required: ['language'], + properties: { + language: { + title: 'language', + type: 'string', + pattern: 'Spanish|English', + description: 'Extracted from the Workflow definition', + default: 'English', + }, + }, + }, + ], +}; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponseMultiStep.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponseMultiStep.ts new file mode 100644 index 0000000000..712302acec --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowDataInputSchemaResponseMultiStep.ts @@ -0,0 +1,556 @@ +import { WorkflowDataInputSchemaResponse } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeDataInputSchemaMultiStepReponse: WorkflowDataInputSchemaResponse = + { + workflowItem: { + uri: 'quarkus-backend.sw.yaml', + definition: { + id: 'quarkus-backend', + version: '1.0', + specVersion: '0.8', + name: 'Quarkus Backend application', + description: + 'Create a starter Quarkus backend application with a CI pipeline', + dataInputSchema: 'schemas/quarkus-backend__main-schema.json', + functions: [ + { + name: 'runActionFetchTemplate', + operation: 'specs/actions-openapi.json#fetch:template', + }, + { + name: 'runActionPublishGithub', + operation: 'specs/actions-openapi.json#publish:github', + }, + { + name: 'runActionCatalogRegister', + operation: 'specs/actions-openapi.json#catalog:register', + }, + { + name: 'fs:delete', + operation: 'specs/actions-openapi.json#fs:delete', + }, + { + name: 'sysout', + type: 'custom', + operation: 'sysout', + }, + ], + errors: [ + { + name: 'Error on Action', + code: 'java.lang.RuntimeException', + }, + ], + start: 'Generating the Source Code Component', + states: [ + { + name: 'Generating the Source Code Component', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Fetch Template Action - Source Code', + functionRef: { + refName: 'runActionFetchTemplate', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/templates/github/quarkus-backend/skeleton', + values: { + orgName: '.orgName', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + port: '.port', + ci: '.ci', + sourceControl: 'github.com', + groupId: '.groupId', + artifactId: '.artifactId', + 
javaPackageName: '.javaPackageName', + version: '.version', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionFetchTemplateSourceCodeResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Clear File System - Source Code', + transition: 'Generating the CI Component', + }, + { + name: 'Generating the CI Component', + type: 'switch', + dataConditions: [ + { + condition: '${ .ci == "github" }', + transition: 'Generating the CI Component - GitHub', + }, + { + condition: '${ .ci == "tekton" }', + transition: 'Generating the CI Component - Tekton', + }, + ], + defaultCondition: { + transition: 'Generating the CI Component - GitHub', + }, + }, + { + name: 'Generating the CI Component - GitHub', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Run Template Fetch Action - CI - GitHub', + functionRef: { + refName: 'runActionFetchTemplate', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/github-actions', + copyWithoutTemplating: ['".github/workflows/"'], + values: { + orgName: '.orgName', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + port: '.port', + ci: '.ci', + sourceControl: 'github.com', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + version: '.version', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionTemplateFetchCIResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Clear File System - CI', + transition: 'Generating the Catalog Info Component', + }, + { + name: 'Generating the CI Component - Tekton', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Run Template Fetch Action - CI - Tekton', + functionRef: { + refName: 'runActionFetchTemplate', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/tekton', + copyWithoutTemplating: ['".github/workflows/"'], + values: { + orgName: '.orgName', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: '.imageUrl', + imageRepository: '.imageRepository', + imageBuilder: 's2i-java', + port: '.port', + ci: '.ci', + sourceControl: 'github.com', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + version: '.version', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionTemplateFetchCIResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Clear File System - CI', + transition: 'Generating the Catalog Info Component', + }, + { + name: 'Generating the Catalog Info Component', + type: 'operation', + actions: [ + { + name: 'Fetch Template Action - Catalog Info', + functionRef: { + refName: 'runActionFetchTemplate', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/catalog-info', + values: { + orgName: '.orgName', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: 'imageUrl', + imageRepository: '.imageRepository', + imageBuilder: 's2i-go', + port: '.port', + ci: '.ci', + sourceControl: 'github.com', + groupId: '.groupId', + artifactId: 
'.artifactId', + javaPackageName: '.javaPackageName', + version: '.version', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionFetchTemplateCatalogInfoResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Clear File System - Catalog', + transition: 'Publishing to the Source Code Repository', + }, + { + name: 'Publishing to the Source Code Repository', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Publish Github', + functionRef: { + refName: 'runActionPublishGithub', + arguments: { + allowedHosts: ['"github.com"'], + description: 'Workflow Action', + repoUrl: + '"github.com?owner=" + .orgName + "&repo=" + .repoName', + defaultBranch: 'main', + gitCommitMessage: 'Initial commit', + allowAutoMerge: true, + allowRebaseMerge: true, + }, + }, + actionDataFilter: { + toStateData: '.actionPublishResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Remove Source Code Repository', + transition: 'Registering the Catalog Info Component', + }, + { + name: 'Registering the Catalog Info Component', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Catalog Register Action', + functionRef: { + refName: 'runActionCatalogRegister', + arguments: { + repoContentsUrl: '.actionPublishResult.repoContentsUrl', + catalogInfoPath: '"/catalog-info.yaml"', + }, + }, + actionDataFilter: { + toStateData: '.actionCatalogRegisterResult', + }, + }, + ], + onErrors: [ + { + errorRef: 'Error on Action', + transition: 'Handle Error', + }, + ], + compensatedBy: 'Remove Catalog Info Component', + end: true, + }, + { + name: 'Handle Error', + type: 'operation', + actions: [ + { + name: 'Error Action', + functionRef: { + refName: 'sysout', + arguments: { + message: 'Error on workflow, triggering compensations', + }, + }, + }, + ], + end: { + compensate: true, + }, + }, + { + name: 'Clear File System - Source Code', + type: 'operation', + usedForCompensation: true, + actions: [ + { + name: 'Clear FS Action', + functionRef: { + refName: 'fs:delete', + arguments: { + files: ['./'], + }, + }, + }, + ], + }, + { + name: 'Clear File System - CI', + type: 'operation', + usedForCompensation: true, + actions: [ + { + name: 'Clear FS Action', + functionRef: { + refName: 'fs:delete', + arguments: { + files: ['./'], + }, + }, + }, + ], + }, + { + name: 'Clear File System - Catalog', + type: 'operation', + usedForCompensation: true, + actions: [ + { + name: 'Clear FS Action', + functionRef: { + refName: 'fs:delete', + arguments: { + files: ['./'], + }, + }, + }, + ], + }, + { + name: 'Remove Source Code Repository', + type: 'operation', + usedForCompensation: true, + actions: [ + { + name: 'Remove Source Code Repository', + functionRef: { + refName: 'sysout', + arguments: { + message: 'Remove Source Code Repository', + }, + }, + }, + ], + }, + { + name: 'Remove Catalog Info Component', + type: 'operation', + usedForCompensation: true, + actions: [ + { + name: 'Remove Catalog Info Component', + functionRef: { + refName: 'sysout', + arguments: { + message: 'Remove Catalog Info Component', + }, + }, + }, + ], + }, + ], + }, + }, + schemas: [ + { + $id: 'classpath:/schemas/quarkus-backend__ref-schema__New_Component.json', + title: 'Provide information about the new component', + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + properties: { + orgName: { + title: 'Organization Name', + description: 
'Organization name', + type: 'string', + }, + repoName: { + title: 'Repository Name', + description: 'Repository name', + type: 'string', + }, + description: { + title: 'Description', + description: 'Help others understand what this component is for', + type: 'string', + }, + owner: { + title: 'Owner', + description: 'An entity from the catalog', + type: 'string', + }, + system: { + title: 'System', + description: 'An entity from the catalog', + type: 'string', + }, + port: { + title: 'Port', + description: 'Override the port exposed for the application', + type: 'number', + default: 8080, + }, + }, + required: ['orgName', 'repoName', 'owner', 'system', 'port'], + }, + { + $id: 'classpath:/schemas/quarkus-backend__ref-schema__Java_Metadata.json', + title: 'Provide information about the Java metadata', + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + properties: { + groupId: { + title: 'Group ID', + description: 'Maven Group ID eg (io.janus)', + type: 'string', + default: 'io.janus', + }, + artifactId: { + title: 'Artifact ID', + description: 'Maven Artifact ID', + type: 'string', + default: 'quarkusapp', + }, + javaPackageName: { + title: 'Java Package Namespace', + description: + 'Name for the Java Package (ensure to use the / character as this is used for folder structure) should match Group ID and Artifact ID', + type: 'string', + default: 'io/janus/quarkusapp', + }, + version: { + title: 'Version', + description: 'Maven Artifact Version', + type: 'string', + default: '1.0.0-SNAPSHOT', + }, + }, + required: ['groupId', 'artifactId', 'javaPackageName', 'version'], + }, + { + $id: 'classpath:/schemas/quarkus-backend__ref-schema__CI_Method.json', + title: 'Provide information about the CI method', + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + properties: { + ci: { + title: 'CI Method', + type: 'string', + default: 'github', + oneOf: [ + { + const: 'github', + title: 'GitHub Action', + }, + { + const: 'tekton', + title: 'Tekton', + }, + ], + }, + }, + allOf: [ + { + if: { + properties: { + ci: { + const: 'github', + }, + }, + }, + }, + { + if: { + properties: { + ci: { + const: 'tekton', + }, + }, + }, + then: { + properties: { + imageRepository: { + title: 'Image Registry', + description: 'The registry to use', + type: 'string', + default: 'quay.io', + oneOf: [ + { + const: 'quay.io', + title: 'Quay', + }, + { + const: 'image-registry.openshift-image-registry.svc:5000', + title: 'Internal OpenShift Registry', + }, + ], + }, + imageUrl: { + title: 'Image URL', + description: + 'The Quay.io or OpenShift Image URL //', + type: 'string', + }, + namespace: { + title: 'Namespace', + description: 'The namespace for deploying resources', + type: 'string', + }, + }, + required: ['namespace', 'imageUrl', 'imageRepository'], + }, + }, + ], + }, + ], + }; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowItem.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowItem.ts new file mode 100644 index 0000000000..a4d8249ea7 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowItem.ts @@ -0,0 +1,247 @@ +import { WorkflowItem } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeWorkflowItem: WorkflowItem = { + uri: 'quarkus-backend-workflow-ci-switch.sw.yaml', + definition: { + id: 'quarkus-backend-workflow-ci-switch', + version: '1.0', + specVersion: '0.8', + name: '[WF] Create a starter Quarkus Backend application with a CI pipeline - CI Switch', + description: + '[WF] Create a starter Quarkus 
Backend application with a CI pipeline - CI Switch', + functions: [ + { + name: 'runActionTemplateFetch', + operation: 'specs/actions-openapi.json#fetch:template', + }, + { + name: 'runActionPublishGithub', + operation: 'specs/actions-openapi.json#publish:github', + }, + { + name: 'runActionCatalogRegister', + operation: 'specs/actions-openapi.json#catalog:register', + }, + ], + start: 'Generating the Source Code Component', + states: [ + { + name: 'Generating the Source Code Component', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Fetch Template Action - Source Code', + functionRef: { + refName: 'runActionTemplateFetch', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/templates/github/quarkus-backend/skeleton', + values: { + githubOrg: '.githubOrg', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: 'imageUrl', + imageBuilder: '.imageBuilder', + imageRepository: '.imageRepository', + port: '.port', + ci: '.ci', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionFetchTemplateSourceCodeResult', + }, + }, + ], + transition: 'Generating the CI Component', + }, + { + name: 'Generating the CI Component', + type: 'switch', + dataConditions: [ + { + condition: '${ .ci == "github" }', + transition: 'Generating the CI Component - GitHub', + }, + { + condition: '${ .ci == "tekton" }', + transition: 'Generating the CI Component - Tekton', + }, + ], + defaultCondition: { + transition: 'Generating the CI Component - GitHub', + }, + }, + { + name: 'Generating the CI Component - GitHub', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Run Template Fetch Action - CI - GitHub', + functionRef: { + refName: 'runActionTemplateFetch', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/github-actions', + copyWithoutTemplating: ['".github/workflows/"'], + values: { + githubOrg: '.githubOrg', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: 'imageUrl', + imageBuilder: '.imageBuilder', + imageRepository: '.imageRepository', + port: '.port', + ci: '.ci', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionTemplateFetchCIResult', + }, + }, + ], + transition: 'Generating the Catalog Info Component', + }, + { + name: 'Generating the CI Component - Tekton', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Run Template Fetch Action - CI - Tekton', + functionRef: { + refName: 'runActionTemplateFetch', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/tekton', + copyWithoutTemplating: ['".tekton/workflows/"'], + values: { + githubOrg: '.githubOrg', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: 'imageUrl', + imageBuilder: '.imageBuilder', + imageRepository: '.imageRepository', + port: '.port', + ci: '.ci', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionTemplateFetchCIResult', + }, + }, + ], + 
transition: 'Generating the Catalog Info Component', + }, + { + name: 'Generating the Catalog Info Component', + type: 'operation', + actions: [ + { + name: 'Fetch Template Action - Catalog Info', + functionRef: { + refName: 'runActionTemplateFetch', + arguments: { + url: 'https://github.com/janus-idp/software-templates/tree/main/skeletons/catalog-info', + values: { + githubOrg: '.githubOrg', + repoName: '.repoName', + owner: '.owner', + system: '.system', + applicationType: 'api', + description: '.description', + namespace: '.namespace', + imageUrl: 'imageUrl', + imageBuilder: '.imageBuilder', + imageRepository: '.imageRepository', + port: '.port', + ci: '.ci', + groupId: '.groupId', + artifactId: '.artifactId', + javaPackageName: '.javaPackageName', + }, + }, + }, + actionDataFilter: { + toStateData: '.actionFetchTemplateCatalogInfoResult', + }, + }, + ], + transition: 'Publishing to the Source Code Repository', + }, + { + name: 'Publishing to the Source Code Repository', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Publish Github Action', + functionRef: { + refName: 'runActionPublishGithub', + arguments: { + allowedHosts: ['"github.com"'], + description: 'Workflow Action', + repoUrl: + '"github.com?owner=" + .githubOrg + "&repo=" + .repoName', + defaultBranch: '.defaultBranch', + gitCommitMessage: '.gitCommitMessage', + allowAutoMerge: true, + allowRebaseMerge: true, + }, + }, + actionDataFilter: { + toStateData: '.actionPublishResult', + }, + }, + ], + transition: 'Registering the Catalog Info Component', + }, + { + name: 'Registering the Catalog Info Component', + type: 'operation', + actionMode: 'sequential', + actions: [ + { + name: 'Catalog Register Action', + functionRef: { + refName: 'runActionCatalogRegister', + arguments: { + repoContentsUrl: '.actionPublishResult.repoContentsUrl', + catalogInfoPath: '"/catalog-info.yaml"', + }, + }, + actionDataFilter: { + toStateData: '.actionCatalogRegisterResult', + }, + }, + ], + end: true, + }, + ], + dataInputSchema: + 'schemas/quarkus-backend-workflow-ci-switch__main_schema.json', + annotations: ['workflow-type/ci'], + }, +}; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverview.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverview.ts new file mode 100644 index 0000000000..e769c24b75 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverview.ts @@ -0,0 +1,13 @@ +import { WorkflowOverview } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeWorkflowOverview: WorkflowOverview = { + workflowId: 'quarkus-backend-workflow-ci-switch', + name: '[WF] Create a starter Quarkus Backend application with a CI pipeline - CI Switch', + lastTriggeredMs: 1697276096000, + lastRunStatus: 'COMPLETED', + category: 'ci', + avgDurationMs: 150000, + description: + 'Create a starter Quarkus Backend application with a CI pipeline', + uri: 'quarkus-backend-workflow-ci-switch.sw.yaml', +}; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverviewList.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverviewList.ts new file mode 100644 index 0000000000..27d8b2bb14 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowOverviewList.ts @@ -0,0 +1,100 @@ +import { WorkflowOverview } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeWorkflowOverviewList: WorkflowOverview[] = [ + { + workflowId: 'quarkus-backend-workflow-ci-switch', + name: '[WF] Create a starter Quarkus Backend application with a CI pipeline 
- CI Switch', + uri: 'quarkus-backend-workflow-ci-switch.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'COMPLETED', + description: + '[WF] Create a starter Quarkus Backend application with a CI pipeline - CI Switch', + }, + { + workflowId: 'orchestrator-ansible-job-long-timeout', + name: '[WF] Ansible Job with Jira and Timeout', + uri: 'orchestrator-ansible-job-long-timeout.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'ACTIVE', + description: + '[WF] Launch an Ansible Job within Ansible Automation Platform with Jira integration and Timeout', + }, + { + workflowId: 'orchestrator-ansible-job-parallel-error-handler', + name: '[WF] Ansible Job - Parallel/ERROR', + uri: 'orchestrator-ansible-job-parallel-error-handler.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'SUSPENDED', + description: + '[WF] Launch an Ansible Job within Ansible Automation Platform - ERROR Handling', + }, + { + workflowId: 'orchestrator-ansible-job-long', + name: '[WF] Ansible Job with Jira', + uri: 'orchestrator-ansible-job-long.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'SUSPENDED', + description: + '[WF] Launch an Ansible Job within Ansible Automation Platform with Jira integration', + }, + { + workflowId: 'orchestrator-ansible-job', + name: '[WF] Ansible Job', + uri: 'orchestrator-ansible-job.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'ERROR', + description: + '[WF] Launch an Ansible Job within Ansible Automation Platform', + }, + { + workflowId: 'quarkus-backend-workflow-extended', + name: '[WF] Create a Quarkus Backend application with a CI pipeline - Extended', + uri: 'quarkus-backend-workflow-extended.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'SUSPENDED', + description: + '[WF] Create a starter Quarkus Backend application with a CI pipeline - Extended', + }, + { + workflowId: 'workflow_actions', + name: 'Workflow name', + uri: 'workflow_actions.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'COMPLETED', + description: 'Workflow description', + }, + { + workflowId: 'yamlgreet', + name: 'Greeting workflow', + uri: 'yamlgreet.sw.yaml', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'ERROR', + description: 'YAML based greeting workflow', + }, + { + workflowId: 'jira', + name: '[WF] Jira', + uri: 'jira.sw.json', + lastTriggeredMs: 1701765793, + category: 'Infrastructure', + avgDurationMs: 5000, + lastRunStatus: 'SUSPENDED', + description: '[WF] Jira issue', + }, +]; diff --git a/plugins/orchestrator/src/__fixtures__/fakeWorkflowSpecs.ts b/plugins/orchestrator/src/__fixtures__/fakeWorkflowSpecs.ts new file mode 100644 index 0000000000..2ca896568e --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/fakeWorkflowSpecs.ts @@ -0,0 +1,2842 @@ +import { WorkflowSpecFile } from '@janus-idp/backstage-plugin-orchestrator-common'; + +export const fakeWorkflowSpecs: WorkflowSpecFile[] = [ + { + path: '/tmp/orchestrator/repository/workflows/specs/actions-openapi.json', + content: { + openapi: '3.0.1', + info: { + title: 'Workflow Actions for BS API', + description: 'Workflow Actions BS API', + version: '0.0.1', + }, + 
servers: [ + { + url: 'http://host.docker.internal:7007/api/orchestrator', + }, + ], + paths: { + '/actions/fetch:plain': { + post: { + operationId: 'fetch:plain', + description: + 'Downloads content and places it in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.', + requestBody: { + description: 'Input parameters for the action fetch:plain in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fetch_plain', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:plain response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fetch:plain:file': { + post: { + operationId: 'fetch:plain:file', + description: + 'Downloads single file and places it in the workspace.', + requestBody: { + description: + 'Input parameters for the action fetch:plain:file in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fetch_plain_file', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:plain:file response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fetch:template': { + post: { + operationId: 'fetch:template', + description: + 'Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.', + requestBody: { + description: + 'Input parameters for the action fetch:template in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fetch_template', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fetch:template response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gerrit': { + post: { + operationId: 'publish:gerrit', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Gerrit.', + requestBody: { + description: + 'Input parameters for the action publish:gerrit in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gerrit', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gerrit response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gerrit:review': { + post: { + operationId: 'publish:gerrit:review', + description: 'Creates a new Gerrit review.', + requestBody: { + description: + 'Input parameters for the action publish:gerrit:review in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gerrit_review', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gerrit:review response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:github': { + post: { + operationId: 'publish:github', + description: + 'Initializes a git repository of contents in workspace and publishes it to GitHub.', + requestBody: { + description: + 'Input parameters for the action publish:github in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_github', + }, + }, + }, + }, + responses: { + default: { + description: 
'Action publish:github response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:github:pull-request': { + post: { + operationId: 'publish:github:pull-request', + requestBody: { + description: + 'Input parameters for the action publish:github:pull-request in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_github_pull-request', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:github:pull-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gitlab': { + post: { + operationId: 'publish:gitlab', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to GitLab.', + requestBody: { + description: + 'Input parameters for the action publish:gitlab in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gitlab', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gitlab response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:gitlab:merge-request': { + post: { + operationId: 'publish:gitlab:merge-request', + requestBody: { + description: + 'Input parameters for the action publish:gitlab:merge-request in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_gitlab_merge-request', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:gitlab:merge-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucket': { + post: { + operationId: 'publish:bitbucket', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucket in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucket', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:bitbucket response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketCloud': { + post: { + operationId: 'publish:bitbucketCloud', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket Cloud.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketCloud in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketCloud', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:bitbucketCloud response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketServer': { + post: { + operationId: 'publish:bitbucketServer', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Bitbucket Server.', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketServer in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketServer', + }, + }, + }, + }, + responses: { + default: { + description: 
'Action publish:bitbucketServer response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:bitbucketServer:pull-request': { + post: { + operationId: 'publish:bitbucketServer:pull-request', + requestBody: { + description: + 'Input parameters for the action publish:bitbucketServer:pull-request in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_bitbucketServer_pull-request', + }, + }, + }, + }, + responses: { + default: { + description: + 'Action publish:bitbucketServer:pull-request response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/publish:azure': { + post: { + operationId: 'publish:azure', + description: + 'Initializes a git repository of the content in the workspace, and publishes it to Azure.', + requestBody: { + description: + 'Input parameters for the action publish:azure in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/publish_azure', + }, + }, + }, + }, + responses: { + default: { + description: 'Action publish:azure response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/debug:log': { + post: { + operationId: 'debug:log', + description: + 'Writes a message into the log or lists all files in the workspace.', + requestBody: { + description: 'Input parameters for the action debug:log in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/debug_log', + }, + }, + }, + }, + responses: { + default: { + description: 'Action debug:log response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/debug:wait': { + post: { + operationId: 'debug:wait', + description: 'Waits for a certain period of time.', + requestBody: { + description: 'Input parameters for the action debug:wait in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/debug_wait', + }, + }, + }, + }, + responses: { + default: { + description: 'Action debug:wait response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:register': { + post: { + operationId: 'catalog:register', + description: + 'Registers entities from a catalog descriptor file in the workspace into the software catalog.', + requestBody: { + description: + 'Input parameters for the action catalog:register in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_register', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:register response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:fetch': { + post: { + operationId: 'catalog:fetch', + description: + 'Returns entity or entities from the catalog by entity reference(s)', + requestBody: { + description: + 'Input parameters for the action catalog:fetch in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_fetch', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:fetch response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/catalog:write': { + 
post: { + operationId: 'catalog:write', + description: 'Writes the catalog-info.yaml for your template', + requestBody: { + description: + 'Input parameters for the action catalog:write in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/catalog_write', + }, + }, + }, + }, + responses: { + default: { + description: 'Action catalog:write response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fs:delete': { + post: { + operationId: 'fs:delete', + description: 'Deletes files and directories from the workspace', + requestBody: { + description: 'Input parameters for the action fs:delete in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fs_delete', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fs:delete response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/fs:rename': { + post: { + operationId: 'fs:rename', + description: 'Renames files and directories within the workspace', + requestBody: { + description: 'Input parameters for the action fs:rename in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/fs_rename', + }, + }, + }, + }, + responses: { + default: { + description: 'Action fs:rename response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:actions:dispatch': { + post: { + operationId: 'github:actions:dispatch', + description: + 'Dispatches a GitHub Action workflow for a given branch or tag', + requestBody: { + description: + 'Input parameters for the action github:actions:dispatch in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_actions_dispatch', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:actions:dispatch response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:webhook': { + post: { + operationId: 'github:webhook', + description: 'Creates webhook for a repository on GitHub.', + requestBody: { + description: + 'Input parameters for the action github:webhook in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_webhook', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:webhook response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:issues:label': { + post: { + operationId: 'github:issues:label', + description: 'Adds labels to a pull request or issue on GitHub.', + requestBody: { + description: + 'Input parameters for the action github:issues:label in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_issues_label', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:issues:label response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:repo:create': { + post: { + operationId: 'github:repo:create', + description: 'Creates a GitHub repository.', + requestBody: { + description: + 'Input parameters for the action github:repo:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: 
'#/components/schemas/github_repo_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:repo:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:repo:push': { + post: { + operationId: 'github:repo:push', + description: + 'Initializes a git repository of contents in workspace and publishes it to GitHub.', + requestBody: { + description: + 'Input parameters for the action github:repo:push in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_repo_push', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:repo:push response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:environment:create': { + post: { + operationId: 'github:environment:create', + description: 'Creates Deployment Environments', + requestBody: { + description: + 'Input parameters for the action github:environment:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_environment_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:environment:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/github:deployKey:create': { + post: { + operationId: 'github:deployKey:create', + description: 'Creates and stores Deploy Keys', + requestBody: { + description: + 'Input parameters for the action github:deployKey:create in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/github_deployKey_create', + }, + }, + }, + }, + responses: { + default: { + description: 'Action github:deployKey:create response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + '/actions/confluence:transform:markdown': { + post: { + operationId: 'confluence:transform:markdown', + requestBody: { + description: + 'Input parameters for the action confluence:transform:markdown in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/confluence_transform_markdown', + }, + }, + }, + }, + responses: { + default: { + description: 'Action confluence:transform:markdown response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + }, + }, + }, + components: { + schemas: { + fetch_plain: { + type: 'object', + required: ['url'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the directory tree to fetch', + type: 'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the contents to.', + type: 'string', + }, + }, + }, + fetch_plain_file: { + type: 'object', + required: ['url', 'targetPath'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the single file to fetch.', + type: 'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the file as.', + type: 'string', + }, + }, + }, + fetch_template: { + type: 'object', + required: ['url'], + properties: { + url: { + title: 'Fetch URL', + description: + 'Relative path or absolute URL pointing to the directory tree to fetch', + type: 
'string', + }, + targetPath: { + title: 'Target Path', + description: + 'Target path within the working directory to download the contents to. Defaults to the working directory root.', + type: 'string', + }, + values: { + title: 'Template Values', + description: 'Values to pass on to the templating engine', + type: 'object', + }, + copyWithoutRender: { + title: '[Deprecated] Copy Without Render', + description: + 'An array of glob patterns. Any files or directories which match are copied without being processed as templates.', + type: 'array', + items: { + type: 'string', + }, + }, + copyWithoutTemplating: { + title: 'Copy Without Templating', + description: + 'An array of glob patterns. Contents of matched files or directories are copied without being processed, but paths are subject to rendering.', + type: 'array', + items: { + type: 'string', + }, + }, + cookiecutterCompat: { + title: 'Cookiecutter compatibility mode', + description: + 'Enable features to maximise compatibility with templates built for fetch:cookiecutter', + type: 'boolean', + }, + templateFileExtension: { + title: 'Template File Extension', + description: + 'If set, only files with the given extension will be templated. If set to `true`, the default extension `.njk` is used.', + type: 'boolean', + }, + replace: { + title: 'Replace files', + description: + 'If set, replace files in targetPath instead of skipping existing ones.', + type: 'boolean', + }, + }, + }, + publish_gerrit: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + type: 'string', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + }, + }, + }, + publish_gerrit_review: { + type: 'object', + required: ['repoUrl', 'gitCommitMessage'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + branch: { + title: 'Repository branch', + type: 'string', + description: + 'Branch of the repository the review will be created on', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory containing the repository', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: 'Sets the commit message on the repository.', + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + }, + }, + publish_github: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + homepage: { + title: 'Repository Homepage', + type: 'string', + }, + access: { + title: 'Repository Access', + description: + "Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 'org/team-name'", + type: 'string', + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + dismissStaleReviews: { + title: 'Dismiss Stale Reviews', + description: + 'New reviewable commits pushed to a matching branch will dismiss pull request review approvals.', + type: 'boolean', + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. 
The default value is 'master'", + }, + protectDefaultBranch: { + title: 'Protect Default Branch', + type: 'boolean', + description: + "Protect the default branch after creating the repository. The default value is 'true'", + }, + protectEnforceAdmins: { + title: 'Enforce Admins On Protected Branches', + type: 'boolean', + description: + "Enforce admins to adhere to default branch protection. The default value is 'true'", + }, + deleteBranchOnMerge: { + title: 'Delete Branch On Merge', + type: 'boolean', + description: + "Delete the branch after merging the PR. The default value is 'false'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + allowMergeCommit: { + title: 'Allow Merge Commits', + type: 'boolean', + description: "Allow merge commits. The default value is 'true'", + }, + allowSquashMerge: { + title: 'Allow Squash Merges', + type: 'boolean', + description: "Allow squash merges. The default value is 'true'", + }, + squashMergeCommitTitle: { + title: 'Default squash merge commit title', + enum: ['PR_TITLE', 'COMMIT_OR_PR_TITLE'], + description: + "Sets the default value for a squash merge commit title. The default value is 'COMMIT_OR_PR_TITLE'", + }, + squashMergeCommitMessage: { + title: 'Default squash merge commit message', + enum: ['PR_BODY', 'COMMIT_MESSAGES', 'BLANK'], + description: + "Sets the default value for a squash merge commit message. The default value is 'COMMIT_MESSAGES'", + }, + allowRebaseMerge: { + title: 'Allow Rebase Merges', + type: 'boolean', + description: "Allow rebase merges. The default value is 'true'", + }, + allowAutoMerge: { + title: 'Allow Auto Merges', + type: 'boolean', + description: + "Allow individual PRs to merge automatically when all merge requirements are met. The default value is 'false'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + collaborators: { + title: 'Collaborators', + description: + 'Provide additional users or teams with permissions', + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: ['access'], + properties: { + access: { + type: 'string', + description: 'The type of access for the user', + }, + user: { + type: 'string', + description: + 'The name of the user that will be added as a collaborator', + }, + team: { + type: 'string', + description: + 'The name of the team that will be added as a collaborator', + }, + }, + }, + }, + hasProjects: { + title: 'Enable projects', + type: 'boolean', + description: + "Enable projects for the repository. The default value is 'true' unless the organization has disabled repository projects", + }, + hasWiki: { + title: 'Enable the wiki', + type: 'boolean', + description: + "Enable the wiki for the repository. The default value is 'true'", + }, + hasIssues: { + title: 'Enable issues', + type: 'boolean', + description: + "Enable issues for the repository. 
The default value is 'true'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + topics: { + title: 'Topics', + type: 'array', + items: { + type: 'string', + }, + }, + repoVariables: { + title: 'Repository Variables', + description: 'Variables attached to the repository', + type: 'object', + }, + secrets: { + title: 'Repository Secrets', + description: 'Secrets attached to the repository', + type: 'object', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + 'publish_github_pull-request': { + required: ['repoUrl', 'title', 'description', 'branchName'], + type: 'object', + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the repository name and 'owner' is an organization or username", + type: 'string', + }, + branchName: { + type: 'string', + title: 'Branch Name', + description: 'The name for the branch', + }, + targetBranchName: { + type: 'string', + title: 'Target Branch Name', + description: 'The target branch name of the merge request', + }, + title: { + type: 'string', + title: 'Pull Request Name', + description: 'The name for the pull request', + }, + description: { + type: 'string', + title: 'Pull Request Description', + description: 'The description of the pull request', + }, + draft: { + type: 'boolean', + title: 'Create as Draft', + description: 'Create a draft pull request', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory to copy changes from', + }, + targetPath: { + type: 'string', + title: 'Repository Subdirectory', + description: 'Subdirectory of repository to apply changes to', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + reviewers: { + title: 'Pull Request Reviewers', + type: 'array', + items: { + type: 'string', + }, + description: + 'The users that will be added as reviewers to the pull request', + }, + teamReviewers: { + title: 'Pull Request Team Reviewers', + type: 'array', + items: { + type: 'string', + }, + description: + 'The teams that will be added as reviewers to the pull request', + }, + commitMessage: { + type: 'string', + title: 'Commit Message', + description: 'The commit message for the pull request commit', + }, + }, + }, + publish_gitlab: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + description: + "Accepts the format 'gitlab.com?repo=project_name&owner=group_name' where 'project_name' is the repository name and 'group_name' is a group or username", + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitLab', + }, + setUserAsOwner: { + title: 'Set User As Owner', + type: 'boolean', + description: + 'Set the token user as owner of the newly created repository. Requires a token authorized to do the edit in the integration configuration for the matching host', + }, + topics: { + title: 'Topic labels', + description: 'Topic labels to apply on the repository.', + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + 'publish_gitlab_merge-request': { + required: ['repoUrl', 'branchName'], + type: 'object', + properties: { + repoUrl: { + type: 'string', + title: 'Repository Location', + description: + "Accepts the format 'gitlab.com?repo=project_name&owner=group_name' where 'project_name' is the repository name and 'group_name' is a group or username", + }, + projectid: { + type: 'string', + title: 'projectid', + description: 'Project ID/Name(slug) of the Gitlab Project', + }, + title: { + type: 'string', + title: 'Merge Request Name', + description: 'The name for the merge request', + }, + description: { + type: 'string', + title: 'Merge Request Description', + description: 'The description of the merge request', + }, + branchName: { + type: 'string', + title: 'Source Branch Name', + description: 'The source branch name of the merge request', + }, + targetBranchName: { + type: 'string', + title: 'Target Branch Name', + description: 'The target branch name of the merge request', + }, + sourcePath: { + type: 'string', + title: 'Working Subdirectory', + description: + 'Subdirectory of working directory to copy changes from', + }, + targetPath: { + type: 'string', + title: 'Repository Subdirectory', + description: 'Subdirectory of repository to apply changes to', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitLab', + }, + commitAction: { + title: 'Commit action', + type: 'string', + enum: ['create', 'update', 'delete'], + description: + 'The action to be used for git commit. Defaults to create.', + }, + removeSourceBranch: { + title: 'Delete source branch', + type: 'boolean', + description: + 'Option to delete source branch once the MR has been merged. Default: false', + }, + assignee: { + title: 'Merge Request Assignee', + type: 'string', + description: 'User this merge request will be assigned to', + }, + }, + }, + publish_bitbucket: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. 
If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + enableLFS: { + title: 'Enable LFS?', + description: + 'Enable LFS for the repository. Only available for hosted Bitbucket.', + type: 'boolean', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to BitBucket', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + }, + }, + publish_bitbucketCloud: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Cloud', + }, + }, + }, + publish_bitbucketServer: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public'], + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + enableLFS: { + title: 'Enable LFS?', + description: 'Enable LFS for the repository.', + type: 'boolean', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Server', + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Author Name', + type: 'string', + description: + "Sets the author name for the commit. 
The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Author Email', + type: 'string', + description: 'Sets the author email for the commit.', + }, + }, + }, + 'publish_bitbucketServer_pull-request': { + type: 'object', + required: ['repoUrl', 'title', 'sourceBranch'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + title: { + title: 'Pull Request title', + type: 'string', + description: 'The title for the pull request', + }, + description: { + title: 'Pull Request Description', + type: 'string', + description: 'The description of the pull request', + }, + targetBranch: { + title: 'Target Branch', + type: 'string', + description: + "Branch of repository to apply changes to. The default value is 'master'", + }, + sourceBranch: { + title: 'Source Branch', + type: 'string', + description: 'Branch of repository to copy changes from', + }, + token: { + title: 'Authorization Token', + type: 'string', + description: + 'The token to use for authorization to BitBucket Server', + }, + }, + }, + publish_azure: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to Azure', + }, + }, + }, + debug_log: { + type: 'object', + properties: { + message: { + title: 'Message to output.', + type: 'string', + }, + listWorkspace: { + title: 'List all files in the workspace, if true.', + type: 'boolean', + }, + extra: { + title: 'Extra info', + }, + }, + }, + debug_wait: { + type: 'object', + properties: { + minutes: { + title: 'Waiting period in minutes.', + type: 'number', + }, + seconds: { + title: 'Waiting period in seconds.', + type: 'number', + }, + milliseconds: { + title: 'Waiting period in milliseconds.', + type: 'number', + }, + }, + }, + catalog_register: { + oneOf: [ + { + type: 'object', + required: ['catalogInfoUrl'], + properties: { + catalogInfoUrl: { + title: 'Catalog Info URL', + description: + 'An absolute URL pointing to the catalog info file location', + type: 'string', + }, + optional: { + title: 'Optional', + description: + 'Permit the registered location to optionally exist. 
Default: false', + type: 'boolean', + }, + }, + }, + { + type: 'object', + required: ['repoContentsUrl'], + properties: { + repoContentsUrl: { + title: 'Repository Contents URL', + description: + 'An absolute URL pointing to the root of a repository directory tree', + type: 'string', + }, + catalogInfoPath: { + title: 'Fetch URL', + description: + 'A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml', + type: 'string', + }, + optional: { + title: 'Optional', + description: + 'Permit the registered location to optionally exist. Default: false', + type: 'boolean', + }, + }, + }, + ], + }, + catalog_fetch: { + type: 'object', + properties: { + entityRef: { + type: 'string', + description: 'Entity reference of the entity to get', + }, + entityRefs: { + type: 'array', + items: { + type: 'string', + }, + description: 'Entity references of the entities to get', + }, + optional: { + type: 'boolean', + description: + 'Allow the entity or entities to optionally exist. Default: false', + }, + defaultKind: { + type: 'string', + description: 'The default kind', + }, + defaultNamespace: { + type: 'string', + description: 'The default namespace', + }, + }, + additionalProperties: false, + }, + catalog_write: { + type: 'object', + properties: { + filePath: { + type: 'string', + description: 'Defaults to catalog-info.yaml', + }, + entity: { + type: 'object', + additionalProperties: {}, + description: + 'You can provide the same values used in the Entity schema.', + }, + }, + required: ['entity'], + additionalProperties: false, + }, + fs_delete: { + required: ['files'], + type: 'object', + properties: { + files: { + title: 'Files', + description: + 'A list of files and directories that will be deleted', + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + fs_rename: { + required: ['files'], + type: 'object', + properties: { + files: { + title: 'Files', + description: + 'A list of file and directory names that will be renamed', + type: 'array', + items: { + type: 'object', + required: ['from', 'to'], + properties: { + from: { + type: 'string', + title: 'The source location of the file to be renamed', + }, + to: { + type: 'string', + title: 'The destination of the new file', + }, + overwrite: { + type: 'boolean', + title: + 'Overwrite existing file or directory, default is false', + }, + }, + }, + }, + }, + }, + github_actions_dispatch: { + type: 'object', + required: ['repoUrl', 'workflowId', 'branchOrTagName'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + workflowId: { + title: 'Workflow ID', + description: 'The GitHub Action Workflow filename', + type: 'string', + }, + branchOrTagName: { + title: 'Branch or Tag name', + description: + 'The git branch or tag name used to dispatch the workflow', + type: 'string', + }, + workflowInputs: { + title: 'Workflow Inputs', + description: + 'Inputs keys and values to send to GitHub Action configured on the workflow file. The maximum number of properties is 10. 
', + type: 'object', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_webhook: { + type: 'object', + required: ['repoUrl', 'webhookUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + webhookUrl: { + title: 'Webhook URL', + description: 'The URL to which the payloads will be delivered', + type: 'string', + }, + webhookSecret: { + title: 'Webhook Secret', + description: + 'Webhook secret value. The default can be provided internally in action creation', + type: 'string', + }, + events: { + title: 'Triggering Events', + description: + 'Determines what events the hook is triggered for. Default: push', + type: 'string', + }, + active: { + title: 'Active', + type: 'boolean', + description: + 'Determines if notifications are sent when the webhook is triggered. Default: true', + }, + contentType: { + title: 'Content Type', + type: 'string', + enum: ['form', 'json'], + description: + "The media type used to serialize the payloads. The default is 'form'", + }, + insecureSsl: { + title: 'Insecure SSL', + type: 'boolean', + description: + "Determines whether the SSL certificate of the host for url will be verified when delivering payloads. Default 'false'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_issues_label: { + type: 'object', + required: ['repoUrl', 'number', 'labels'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the repository name and 'owner' is an organization or username", + type: 'string', + }, + number: { + title: 'Pull Request or issue number', + description: + 'The pull request or issue number to add labels to', + type: 'number', + }, + labels: { + title: 'Labels', + description: 'The labels to add to the pull request or issue', + type: 'array', + items: { + type: 'string', + }, + }, + token: { + title: 'Authentication Token', + type: 'string', + description: + 'The GITHUB_TOKEN to use for authorization to GitHub', + }, + }, + }, + github_repo_create: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + description: { + title: 'Repository Description', + type: 'string', + }, + homepage: { + title: 'Repository Homepage', + type: 'string', + }, + access: { + title: 'Repository Access', + description: + "Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 
'org/team-name'", + type: 'string', + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + repoVisibility: { + title: 'Repository Visibility', + type: 'string', + enum: ['private', 'public', 'internal'], + }, + deleteBranchOnMerge: { + title: 'Delete Branch On Merge', + type: 'boolean', + description: + "Delete the branch after merging the PR. The default value is 'false'", + }, + allowMergeCommit: { + title: 'Allow Merge Commits', + type: 'boolean', + description: "Allow merge commits. The default value is 'true'", + }, + allowSquashMerge: { + title: 'Allow Squash Merges', + type: 'boolean', + description: "Allow squash merges. The default value is 'true'", + }, + squashMergeCommitTitle: { + title: 'Default squash merge commit title', + enum: ['PR_TITLE', 'COMMIT_OR_PR_TITLE'], + description: + "Sets the default value for a squash merge commit title. The default value is 'COMMIT_OR_PR_TITLE'", + }, + squashMergeCommitMessage: { + title: 'Default squash merge commit message', + enum: ['PR_BODY', 'COMMIT_MESSAGES', 'BLANK'], + description: + "Sets the default value for a squash merge commit message. The default value is 'COMMIT_MESSAGES'", + }, + allowRebaseMerge: { + title: 'Allow Rebase Merges', + type: 'boolean', + description: "Allow rebase merges. The default value is 'true'", + }, + allowAutoMerge: { + title: 'Allow Auto Merges', + type: 'boolean', + description: + "Allow individual PRs to merge automatically when all merge requirements are met. 
The default value is 'false'", + }, + collaborators: { + title: 'Collaborators', + description: + 'Provide additional users or teams with permissions', + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: ['access'], + properties: { + access: { + type: 'string', + description: 'The type of access for the user', + }, + user: { + type: 'string', + description: + 'The name of the user that will be added as a collaborator', + }, + team: { + type: 'string', + description: + 'The name of the team that will be added as a collaborator', + }, + }, + }, + }, + hasProjects: { + title: 'Enable projects', + type: 'boolean', + description: + "Enable projects for the repository. The default value is 'true' unless the organization has disabled repository projects", + }, + hasWiki: { + title: 'Enable the wiki', + type: 'boolean', + description: + "Enable the wiki for the repository. The default value is 'true'", + }, + hasIssues: { + title: 'Enable issues', + type: 'boolean', + description: + "Enable issues for the repository. The default value is 'true'", + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + topics: { + title: 'Topics', + type: 'array', + items: { + type: 'string', + }, + }, + repoVariables: { + title: 'Repository Variables', + description: 'Variables attached to the repository', + type: 'object', + }, + secrets: { + title: 'Repository Secrets', + description: 'Secrets attached to the repository', + type: 'object', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + github_repo_push: { + type: 'object', + required: ['repoUrl'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + requireCodeOwnerReviews: { + title: 'Require CODEOWNER Reviews?', + description: + 'Require an approved review in PR including files with a designated Code Owner', + type: 'boolean', + }, + dismissStaleReviews: { + title: 'Dismiss Stale Reviews', + description: + 'New reviewable commits pushed to a matching branch will dismiss pull request review approvals.', + type: 'boolean', + }, + requiredStatusCheckContexts: { + title: 'Required Status Check Contexts', + description: + 'The list of status checks to require in order to merge into this branch', + type: 'array', + items: { + type: 'string', + }, + }, + bypassPullRequestAllowances: { + title: 'Bypass pull request requirements', + description: + 'Allow specific users, teams, or apps to bypass pull request requirements.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requiredApprovingReviewCount: { + title: 'Required approving review count', + type: 'number', + description: + 'Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. Defaults to 1.', + }, + restrictions: { + title: 'Restrict who can push to the protected branch', + description: + 'Restrict who can push to the protected branch. 
User, app, and team restrictions are only available for organization-owned repositories.', + type: 'object', + additionalProperties: false, + properties: { + apps: { + type: 'array', + items: { + type: 'string', + }, + }, + users: { + type: 'array', + items: { + type: 'string', + }, + }, + teams: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + requireBranchesToBeUpToDate: { + title: 'Require Branches To Be Up To Date?', + description: + "Require branches to be up to date before merging. The default value is 'true'", + type: 'boolean', + }, + requiredConversationResolution: { + title: 'Required Conversation Resolution', + description: + 'Requires all conversations on code to be resolved before a pull request can be merged into this branch', + type: 'boolean', + }, + defaultBranch: { + title: 'Default Branch', + type: 'string', + description: + "Sets the default branch on the repository. The default value is 'master'", + }, + protectDefaultBranch: { + title: 'Protect Default Branch', + type: 'boolean', + description: + "Protect the default branch after creating the repository. The default value is 'true'", + }, + protectEnforceAdmins: { + title: 'Enforce Admins On Protected Branches', + type: 'boolean', + description: + "Enforce admins to adhere to default branch protection. The default value is 'true'", + }, + gitCommitMessage: { + title: 'Git Commit Message', + type: 'string', + description: + "Sets the commit message on the repository. The default value is 'initial commit'", + }, + gitAuthorName: { + title: 'Default Author Name', + type: 'string', + description: + "Sets the default author name for the commit. The default value is 'Scaffolder'", + }, + gitAuthorEmail: { + title: 'Default Author Email', + type: 'string', + description: 'Sets the default author email for the commit.', + }, + sourcePath: { + title: 'Source Path', + description: + 'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.', + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + requiredCommitSigning: { + title: 'Require commit signing', + type: 'boolean', + description: + 'Require commit signing so that you must sign commits on this branch.', + }, + }, + }, + github_environment_create: { + type: 'object', + required: ['repoUrl', 'name'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + name: { + title: 'Environment Name', + description: 'Name of the deployment environment to create', + type: 'string', + }, + deploymentBranchPolicy: { + title: 'Deployment Branch Policy', + description: + 'The type of deployment branch policy for this environment. To allow all branches to deploy, set to null.', + type: 'object', + required: ['protected_branches', 'custom_branch_policies'], + properties: { + protected_branches: { + title: 'Protected Branches', + description: + 'Whether only branches with branch protection rules can deploy to this environment. 
If protected_branches is true, custom_branch_policies must be false; if protected_branches is false, custom_branch_policies must be true.', + type: 'boolean', + }, + custom_branch_policies: { + title: 'Custom Branch Policies', + description: + 'Whether only branches that match the specified name patterns can deploy to this environment. If custom_branch_policies is true, protected_branches must be false; if custom_branch_policies is false, protected_branches must be true.', + type: 'boolean', + }, + }, + }, + customBranchPolicyNames: { + title: 'Custom Branch Policy Name', + description: + 'The name pattern that branches must match in order to deploy to the environment.\n\n Wildcard characters will not match /. For example, to match branches that begin with release/ and contain an additional single slash, use release/*/*. For more information about pattern matching syntax, see the Ruby File.fnmatch documentation.', + type: 'array', + items: { + type: 'string', + }, + }, + environmentVariables: { + title: 'Environment Variables', + description: + 'Environment variables attached to the deployment environment', + type: 'object', + }, + secrets: { + title: 'Deployment Secrets', + description: 'Secrets attached to the deployment environment', + type: 'object', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + }, + }, + github_deployKey_create: { + type: 'object', + required: ['repoUrl', 'publicKey', 'privateKey', 'deployKeyName'], + properties: { + repoUrl: { + title: 'Repository Location', + description: + "Accepts the format 'github.com?repo=reponame&owner=owner' where 'reponame' is the new repository name and 'owner' is an organization or username", + type: 'string', + }, + publicKey: { + title: 'SSH Public Key', + description: + "Generated from ssh-keygen. Begins with 'ssh-rsa', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519', 'sk-ecdsa-sha2-nistp256@openssh.com', or 'sk-ssh-ed25519@openssh.com'.", + type: 'string', + }, + privateKey: { + title: 'SSH Private Key', + description: 'SSH Private Key generated from ssh-keygen', + type: 'string', + }, + deployKeyName: { + title: 'Deploy Key Name', + description: 'Name of the Deploy Key', + type: 'string', + }, + privateKeySecretName: { + title: 'Private Key GitHub Secret Name', + description: + "Name of the GitHub Secret to store the private key related to the Deploy Key. Defaults to: 'KEY_NAME_PRIVATE_KEY' where 'KEY_NAME' is the name of the Deploy Key", + type: 'string', + }, + token: { + title: 'Authentication Token', + type: 'string', + description: 'The token to use for authorization to GitHub', + }, + }, + }, + confluence_transform_markdown: { + properties: { + confluenceUrls: { + type: 'array', + title: 'Confluence URL', + description: + 'Paste your Confluence url. 
Ensure it follows this format: https://{confluence+base+url}/display/{spacekey}/{page+title} or https://{confluence+base+url}/spaces/{spacekey}/pages/1234567/{page+title} for Confluence Cloud', + items: { + type: 'string', + default: 'Confluence URL', + }, + }, + repoUrl: { + type: 'string', + title: 'GitHub Repo Url', + description: + 'mkdocs.yml file location inside the github repo you want to store the document', + }, + }, + }, + }, + }, + }, + }, + { + path: '/tmp/orchestrator/repository/workflows/specs/jira-openapi.json', + content: { + openapi: '3.0.1', + info: { + title: 'JIRA Actions for BS API', + description: 'JIRA Actions BS API', + version: '0.0.1', + }, + servers: [ + { + url: 'http://localhost:8080', + }, + ], + paths: { + '/rest/api/2/issue': { + post: { + tags: ['Issues'], + summary: 'Create issue', + description: + "Creates an issue or, where the option to create subtasks is enabled in Jira, a subtask. A transition may be applied, to move the issue or subtask to a workflow step other than the default start step, and issue properties set.\n\nThe content of the issue or subtask is defined using `update` and `fields`. The fields that can be set in the issue or subtask are determined using the [ Get create issue metadata](#api-rest-api-3-issue-createmeta-get). These are the same fields that appear on the issue's create screen. Note that the `description`, `environment`, and any `textarea` type custom fields (multi-line text fields) take Atlassian Document Format content. Single line custom fields (`textfield`) accept a string and don't handle Atlassian Document Format content.\n\nCreating a subtask differs from creating an issue as follows:\n\n * `issueType` must be set to a subtask issue type (use [ Get create issue metadata](#api-rest-api-3-issue-createmeta-get) to find subtask issue types).\n * `parent` must contain the ID or key of the parent issue.\n\nIn a next-gen project any issue may be made a child providing that the parent and child are members of the same project.\n\n**[Permissions](#permissions) required:** *Browse projects* and *Create issues* [project permissions](https://confluence.atlassian.com/x/yodKLg) for the project in which the issue or subtask is created.", + operationId: 'createIssue', + parameters: [ + { + name: 'updateHistory', + in: 'query', + description: + "Whether the project in which the issue is created is added to the user's **Recently viewed** project list, as shown under **Projects** in Jira. When provided, the issue type and request type are added to the user's history for a project. 
These values are then used to provide defaults on the issue create screen.", + schema: { + type: 'boolean', + default: false, + }, + }, + ], + requestBody: { + description: 'Input parameters for the action createIssue in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/IssueUpdateDetails', + }, + }, + }, + }, + responses: { + default: { + description: 'Create Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + }, + '/rest/api/2/issue/{issueIdOrKey}/transitions': { + post: { + tags: ['Issues'], + summary: 'Transition issue', + description: + 'Performs an issue transition and, if the transition has a screen, updates the fields from the transition screen.', + operationId: 'transitionIssue', + parameters: [ + { + name: 'issueIdOrKey', + in: 'path', + description: 'The ID or key of the issue.', + required: true, + schema: { + type: 'string', + }, + }, + ], + requestBody: { + description: + 'Input parameters for the action transitionIssue in BS', + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/TransitionIssue', + }, + }, + }, + }, + responses: { + default: { + description: 'Transition Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + get: { + tags: ['Issues'], + summary: 'Get issue transitions', + description: 'Get issue transitions', + operationId: 'getIssueTransitions', + parameters: [ + { + name: 'issueIdOrKey', + in: 'path', + description: 'The ID or key of the issue.', + required: true, + schema: { + type: 'string', + }, + }, + ], + responses: { + default: { + description: 'Transition Issue Response', + content: { + 'application/json': { + schema: { + type: 'object', + }, + }, + }, + }, + }, + deprecated: false, + security: [ + { + bearerAuth: [], + }, + ], + }, + }, + }, + components: { + securitySchemes: { + basicAuth: { + type: 'http', + scheme: 'basic', + }, + bearerAuth: { + type: 'http', + scheme: 'bearer', + }, + }, + schemas: { + ErrorCollection: { + type: 'object', + }, + IssueUpdateDetails: { + type: 'object', + properties: { + fields: { + type: 'object', + }, + }, + }, + TransitionIssue: { + type: 'object', + properties: { + transition: { + type: 'object', + properties: { + id: { + type: 'string', + }, + }, + }, + update: { + type: 'object', + properties: { + comment: { + type: 'array', + items: { + type: 'object', + properties: { + add: { + type: 'object', + properties: { + body: { + type: 'string', + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + CreatedIssue: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The ID of the created issue or subtask.', + readOnly: true, + }, + key: { + type: 'string', + description: 'The key of the created issue or subtask.', + readOnly: true, + }, + self: { + type: 'string', + description: 'The URL of the created issue or subtask.', + readOnly: true, + }, + }, + }, + }, + }, + }, + }, +]; diff --git a/plugins/orchestrator/src/__fixtures__/veryLongString.ts b/plugins/orchestrator/src/__fixtures__/veryLongString.ts new file mode 100644 index 0000000000..7753177aa5 --- /dev/null +++ b/plugins/orchestrator/src/__fixtures__/veryLongString.ts @@ -0,0 +1,61 @@ +export const veryLongString = ` +2 cups all-purpose flour +1 1/2 teaspoons baking powder +1/2 teaspoon 
baking soda +1/2 teaspoon salt +1 teaspoon ground cinnamon +1/2 teaspoon ground nutmeg +1/2 cup unsalted butter, softened +1 cup granulated sugar +1/2 cup brown sugar, packed +2 large eggs +2 teaspoons vanilla extract +1/2 cup sour cream +2 cups apples, peeled and diced (such as Granny Smith) +1/2 cup chopped nuts (optional) +Powdered sugar for dusting (optional) +Instructions: + +Preheat your oven to 350°F (175°C). Grease and flour a 9x13-inch baking pan. +In a medium bowl, whisk together the flour, baking powder, baking soda, salt, cinnamon, and nutmeg. Set aside. +In a large bowl, cream together the softened butter, granulated sugar, and brown sugar until light and fluffy. +Beat in the eggs one at a time, then stir in the vanilla extract. +Gradually add the dry ingredients to the wet ingredients, mixing until just combined. +Fold in the sour cream, followed by the diced apples and nuts (if using). +Spread the batter evenly in the prepared baking pan. +Bake for 40-45 minutes or until a toothpick inserted into the center comes out clean. +Allow the cake to cool in the pan for 10 minutes, then transfer it to a wire rack to cool completely. +Optionally, dust the cooled cake with powdered sugar before serving. +Enjoy your delicious homemade apple cake! + + +Ingredients: + +2 cups all-purpose flour +1 1/2 teaspoons baking powder +1/2 teaspoon baking soda +1/2 teaspoon salt +1 teaspoon ground cinnamon +1/2 teaspoon ground nutmeg +1/2 cup unsalted butter, softened +1 cup granulated sugar +1/2 cup brown sugar, packed +2 large eggs +2 teaspoons vanilla extract +1/2 cup sour cream +2 cups apples, peeled and diced (such as Granny Smith) +1/2 cup chopped nuts (optional) +Powdered sugar for dusting (optional) +Instructions: + +Preheat your oven to 350°F (175°C). Grease and flour a 9x13-inch baking pan. +In a medium bowl, whisk together the flour, baking powder, baking soda, salt, cinnamon, and nutmeg. Set aside. +In a large bowl, cream together the softened butter, granulated sugar, and brown sugar until light and fluffy. +Beat in the eggs one at a time, then stir in the vanilla extract. +Gradually add the dry ingredients to the wet ingredients, mixing until just combined. +Fold in the sour cream, followed by the diced apples and nuts (if using). +Spread the batter evenly in the prepared baking pan. +Bake for 40-45 minutes or until a toothpick inserted into the center comes out clean. +Allow the cake to cool in the pan for 10 minutes, then transfer it to a wire rack to cool completely. +Optionally, dust the cooled cake with powdered sugar before serving. 
+Enjoy your delicious homemade apple cake!`; diff --git a/plugins/orchestrator/src/api/MockOrchestratorClient.ts b/plugins/orchestrator/src/api/MockOrchestratorClient.ts new file mode 100644 index 0000000000..0d62a53eab --- /dev/null +++ b/plugins/orchestrator/src/api/MockOrchestratorClient.ts @@ -0,0 +1,193 @@ +import { JsonValue } from '@backstage/types'; + +import { + Job, + ProcessInstance, + WorkflowDataInputSchemaResponse, + WorkflowExecutionResponse, + WorkflowItem, + WorkflowListResult, + WorkflowOverview, + WorkflowOverviewListResult, + WorkflowSpecFile, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { hasOwnProp, isNonNullable } from '../utils/TypeGuards'; +import { OrchestratorApi } from './api'; + +export interface MockOrchestratorApiData { + createWorkflowDefinitionResponse: ReturnType< + OrchestratorApi['createWorkflowDefinition'] + >; + deleteWorkflowDefinitionResponse: ReturnType< + OrchestratorApi['deleteWorkflowDefinition'] + >; + executeWorkflowResponse: () => ReturnType<OrchestratorApi['executeWorkflow']>; + getInstanceResponse: ReturnType<OrchestratorApi['getInstance']>; + getInstancesResponse: ReturnType<OrchestratorApi['getInstances']>; + getInstanceJobsResponse: ReturnType<OrchestratorApi['getInstanceJobs']>; + getSpecsResponse: ReturnType<OrchestratorApi['getSpecs']>; + getWorkflowResponse: ReturnType<OrchestratorApi['getWorkflow']>; + getWorkflowDataInputSchemaResponse: ReturnType< + OrchestratorApi['getWorkflowDataInputSchema'] + >; + listWorkflowsResponse: ReturnType<OrchestratorApi['listWorkflows']>; + listWorkflowsOverviewResponse: ReturnType< + OrchestratorApi['listWorkflowsOverview'] + >; + getWorkflowOverviewResponse: ReturnType< + OrchestratorApi['getWorkflowOverview'] + >; +} + +export class MockOrchestratorClient implements OrchestratorApi { + private _mockData: Partial<MockOrchestratorApiData>; + + constructor(mockData: Partial<MockOrchestratorApiData> = {}) { + this._mockData = mockData; + } + + createWorkflowDefinition( + _uri: string, + _content?: string, + ): Promise<WorkflowItem> { + if ( + !hasOwnProp(this._mockData, 'createWorkflowDefinitionResponse') || + !isNonNullable(this._mockData.createWorkflowDefinitionResponse) + ) { + throw new Error(`[createWorkflowDefinition]: No mock data available`); + } + + return Promise.resolve(this._mockData.createWorkflowDefinitionResponse); + } + + deleteWorkflowDefinition(_workflowId: string): Promise<void> { + if ( + !hasOwnProp(this._mockData, 'deleteWorkflowDefinitionResponse') || + !isNonNullable(this._mockData.deleteWorkflowDefinitionResponse) + ) { + throw new Error(`[deleteWorkflowDefinition]: No mock data available`); + } + + return Promise.resolve(this._mockData.deleteWorkflowDefinitionResponse); + } + + executeWorkflow(_args: { + workflowId: string; + parameters: Record<string, JsonValue>; + }): Promise<WorkflowExecutionResponse> { + if ( + !hasOwnProp(this._mockData, 'executeWorkflowResponse') || + !isNonNullable(this._mockData.executeWorkflowResponse) + ) { + throw new Error(`[executeWorkflow]: No mock data available`); + } + + return this._mockData.executeWorkflowResponse(); + } + + getInstance(_instanceId: string): Promise<ProcessInstance> { + if ( + !hasOwnProp(this._mockData, 'getInstanceResponse') || + !isNonNullable(this._mockData.getInstanceResponse) + ) { + throw new Error(`[getInstance]: No mock data available`); + } + + return Promise.resolve(this._mockData.getInstanceResponse); + } + + getInstanceJobs(_instanceId: string): Promise<Job[]> { + if ( + !hasOwnProp(this._mockData, 'getInstanceJobsResponse') || + !isNonNullable(this._mockData.getInstanceJobsResponse) + ) { + throw new Error(`[getInstanceJobs]: No mock data available`); + } + + return Promise.resolve(this._mockData.getInstanceJobsResponse); + } + + getInstances(): Promise<ProcessInstance[]> { + if ( + !hasOwnProp(this._mockData, 'getInstancesResponse') || +
!isNonNullable(this._mockData.getInstancesResponse) + ) { + throw new Error(`[getInstances]: No mock data available`); + } + + return Promise.resolve(this._mockData.getInstancesResponse); + } + + getSpecs(): Promise<WorkflowSpecFile[]> { + if ( + !hasOwnProp(this._mockData, 'getSpecsResponse') || + !isNonNullable(this._mockData.getSpecsResponse) + ) { + throw new Error(`[getSpecs]: No mock data available`); + } + + return Promise.resolve(this._mockData.getSpecsResponse); + } + + getWorkflow(_workflowId: string): Promise<WorkflowItem> { + if ( + !hasOwnProp(this._mockData, 'getWorkflowResponse') || + !isNonNullable(this._mockData.getWorkflowResponse) + ) { + throw new Error(`[getWorkflow]: No mock data available`); + } + + return Promise.resolve(this._mockData.getWorkflowResponse); + } + + getWorkflowDataInputSchema( + _workflowId: string, + ): Promise<WorkflowDataInputSchemaResponse> { + if ( + !hasOwnProp(this._mockData, 'getWorkflowDataInputSchemaResponse') || + !isNonNullable(this._mockData.getWorkflowDataInputSchemaResponse) + ) { + throw new Error(`[getWorkflowDataInputSchema]: No mock data available`); + } + + return Promise.resolve(this._mockData.getWorkflowDataInputSchemaResponse); + } + + listWorkflows(): Promise<WorkflowListResult> { + if ( + !hasOwnProp(this._mockData, 'listWorkflowsResponse') || + !isNonNullable(this._mockData.listWorkflowsResponse) + ) { + throw new Error(`[listWorkflows]: No mock data available`); + } + + return Promise.resolve(this._mockData.listWorkflowsResponse); + } + + listWorkflowsOverview(): Promise<WorkflowOverviewListResult> { + if ( + !hasOwnProp(this._mockData, 'listWorkflowsOverviewResponse') || + !isNonNullable(this._mockData.listWorkflowsOverviewResponse) + ) { + throw new Error(`[listWorkflowsOverview]: No mock data available`); + } + + return Promise.resolve(this._mockData.listWorkflowsOverviewResponse); + } + + getWorkflowOverview(): Promise<WorkflowOverview> { + if ( + !hasOwnProp(this._mockData, 'getWorkflowOverviewResponse') || + !isNonNullable(this._mockData.getWorkflowOverviewResponse) + ) { + throw new Error(`[getWorkflowOverview]: No mock data available`); + } + + return Promise.resolve(this._mockData.getWorkflowOverviewResponse); + } + + abortWorkflow(_workflowId: string): Promise<void> { + return Promise.resolve(undefined); + } +} diff --git a/plugins/orchestrator/src/api/OrchestratorClient.ts b/plugins/orchestrator/src/api/OrchestratorClient.ts new file mode 100644 index 0000000000..a2b79d0b79 --- /dev/null +++ b/plugins/orchestrator/src/api/OrchestratorClient.ts @@ -0,0 +1,178 @@ +import { DiscoveryApi } from '@backstage/core-plugin-api'; +import { ResponseError } from '@backstage/errors'; +import { JsonValue } from '@backstage/types'; + +import { + Job, + ProcessInstance, + WorkflowDataInputSchemaResponse, + WorkflowExecutionResponse, + WorkflowItem, + WorkflowListResult, + WorkflowOverview, + WorkflowOverviewListResult, + WorkflowSpecFile, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { OrchestratorApi } from './api'; + +export interface OrchestratorClientOptions { + discoveryApi: DiscoveryApi; +} +export class OrchestratorClient implements OrchestratorApi { + private readonly discoveryApi: DiscoveryApi; + private baseUrl: string | null = null; + constructor(options: OrchestratorClientOptions) { + this.discoveryApi = options.discoveryApi; + } + + private async getBaseUrl(): Promise<string> { + if (!this.baseUrl) { + this.baseUrl = await this.discoveryApi.getBaseUrl('orchestrator'); + } + + return this.baseUrl; + } + + async executeWorkflow(args: { + workflowId: string; + parameters: Record<string, JsonValue>; + }): Promise<WorkflowExecutionResponse> { + const baseUrl = await this.getBaseUrl(); +
const res = await fetch(`${baseUrl}/workflows/${args.workflowId}/execute`, { + method: 'POST', + body: JSON.stringify(args.parameters), + headers: { 'content-type': 'application/json' }, + }); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async abortWorkflow(workflowId: string) { + const baseUrl = await this.getBaseUrl(); + const response = await fetch(`${baseUrl}/workflows/${workflowId}/abort`, { + method: 'DELETE', + headers: { 'content-type': 'application/json' }, + }); + + if (!response.ok) { + throw await ResponseError.fromResponse(response); + } + + return await response.json(); + } + + async getWorkflow(workflowId: string): Promise<WorkflowItem> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows/${workflowId}`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async listWorkflows(): Promise<WorkflowListResult> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async listWorkflowsOverview(): Promise<WorkflowOverviewListResult> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows/overview`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return res.json(); + } + + async getInstances(): Promise<ProcessInstance[]> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/instances`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async getInstance(instanceId: string): Promise<ProcessInstance> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/instances/${instanceId}`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async getInstanceJobs(instanceId: string): Promise<Job[]> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/instances/${instanceId}/jobs`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async getWorkflowDataInputSchema( + workflowId: string, + ): Promise<WorkflowDataInputSchemaResponse> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows/${workflowId}/inputSchema`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async createWorkflowDefinition( + uri: string, + content: string, + ): Promise<WorkflowItem> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows?uri=${uri}`, { + method: 'POST', + body: content, + headers: { + 'content-type': 'text/plain', + }, + }); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return await res.json(); + } + + async deleteWorkflowDefinition(workflowId: string): Promise<void> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/workflows/${workflowId}`, { + method: 'DELETE', + headers: { 'content-type': 'application/json' }, + }); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + } + + async getSpecs(): Promise<WorkflowSpecFile[]> { + const baseUrl = await this.getBaseUrl(); + const res = await fetch(`${baseUrl}/specs`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return res.json(); + } + + async getWorkflowOverview(workflowId: string): Promise<WorkflowOverview> { + const baseUrl = await this.discoveryApi.getBaseUrl('orchestrator'); + const res = await
fetch(`${baseUrl}/workflows/${workflowId}/overview`); + if (!res.ok) { + throw await ResponseError.fromResponse(res); + } + return res.json(); + } +} diff --git a/plugins/orchestrator/src/api/api.ts b/plugins/orchestrator/src/api/api.ts new file mode 100644 index 0000000000..e777746513 --- /dev/null +++ b/plugins/orchestrator/src/api/api.ts @@ -0,0 +1,54 @@ +import { createApiRef } from '@backstage/core-plugin-api'; +import { JsonValue } from '@backstage/types'; + +import { + Job, + ProcessInstance, + WorkflowDataInputSchemaResponse, + WorkflowExecutionResponse, + WorkflowItem, + WorkflowListResult, + WorkflowOverview, + WorkflowOverviewListResult, + WorkflowSpecFile, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +export interface OrchestratorApi { + abortWorkflow(workflowId: string): Promise<void>; + + executeWorkflow(args: { + workflowId: string; + parameters: Record<string, JsonValue>; + }): Promise<WorkflowExecutionResponse>; + + getWorkflow(workflowId: string): Promise<WorkflowItem>; + + listWorkflows(): Promise<WorkflowListResult>; + + listWorkflowsOverview(): Promise<WorkflowOverviewListResult>; + + getInstances(): Promise<ProcessInstance[]>; + + getInstance(instanceId: string): Promise<ProcessInstance>; + + getInstanceJobs(instanceId: string): Promise<Job[]>; + + getWorkflowDataInputSchema( + workflowId: string, + ): Promise<WorkflowDataInputSchemaResponse>; + + createWorkflowDefinition( + uri: string, + content?: string, + ): Promise<WorkflowItem>; + + deleteWorkflowDefinition(workflowId: string): Promise<void>; + + getSpecs(): Promise<WorkflowSpecFile[]>; + + getWorkflowOverview(workflowId: string): Promise<WorkflowOverview>; +} + +export const orchestratorApiRef = createApiRef<OrchestratorApi>({ + id: 'plugin.orchestrator.api', +}); diff --git a/plugins/orchestrator/src/api/index.ts b/plugins/orchestrator/src/api/index.ts new file mode 100644 index 0000000000..80bca0e57f --- /dev/null +++ b/plugins/orchestrator/src/api/index.ts @@ -0,0 +1,3 @@ +export { OrchestratorClient } from './OrchestratorClient'; +export type { OrchestratorClientOptions } from './OrchestratorClient'; +export { orchestratorApiRef } from './api'; diff --git a/plugins/orchestrator/src/components/BaseOrchestratorPage.tsx b/plugins/orchestrator/src/components/BaseOrchestratorPage.tsx new file mode 100644 index 0000000000..c541703d1f --- /dev/null +++ b/plugins/orchestrator/src/components/BaseOrchestratorPage.tsx @@ -0,0 +1,32 @@ +import React, { PropsWithChildren } from 'react'; + +import { Content, Header, Page } from '@backstage/core-components'; + +export interface BaseOrchestratorProps { + title?: string; + subtitle?: string; + type?: string; + typeLink?: string; + noPadding?: boolean; +} + +export const BaseOrchestratorPage = ({ + title, + subtitle, + type, + typeLink, + noPadding, + children, +}: PropsWithChildren<BaseOrchestratorProps>) => { + return ( +
+ {children} + + ); +}; diff --git a/plugins/orchestrator/src/components/CreateWorkflowPage.tsx b/plugins/orchestrator/src/components/CreateWorkflowPage.tsx new file mode 100644 index 0000000000..e4ad21f27e --- /dev/null +++ b/plugins/orchestrator/src/components/CreateWorkflowPage.tsx @@ -0,0 +1,144 @@ +import React, { useCallback, useMemo, useState } from 'react'; +import { useNavigate, useParams } from 'react-router-dom'; + +import { ContentHeader, InfoCard, Progress } from '@backstage/core-components'; +import { + alertApiRef, + errorApiRef, + useApi, + useRouteRef, + useRouteRefParams, +} from '@backstage/core-plugin-api'; + +import { useController } from '@kie-tools-core/react-hooks/dist/useController'; +import { Grid } from '@material-ui/core'; +import Button from '@material-ui/core/Button'; + +import { WORKFLOW_TITLE } from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { orchestratorApiRef } from '../api'; +import { editWorkflowRouteRef, workflowDefinitionsRouteRef } from '../routes'; +import { BaseOrchestratorPage } from './BaseOrchestratorPage'; +import { WorkflowEditor } from './WorkflowEditor'; +import { + EditorViewKind, + WorkflowEditorRef, +} from './WorkflowEditor/WorkflowEditor'; + +export const CreateWorkflowPage = () => { + const { format } = useParams(); + const { workflowId } = useRouteRefParams(editWorkflowRouteRef); + const [workflowEditor, workflowEditorRef] = + useController(); + const errorApi = useApi(errorApiRef); + const alertApi = useApi(alertApiRef); + const orchestratorApi = useApi(orchestratorApiRef); + const navigate = useNavigate(); + const definitionLink = useRouteRef(workflowDefinitionsRouteRef); + const [loading, setLoading] = useState(false); + + const workflowFormat = useMemo( + () => (format === 'json' ? 'json' : 'yaml'), + [format], + ); + + const handleResult = useCallback( + async (content: string) => { + if (!workflowEditor?.workflowItem) { + return; + } + + try { + const notifications = await workflowEditor.validate(); + if (notifications.length !== 0) { + const messages = notifications.map(n => n.message).join('; '); + errorApi.post({ + name: 'Validation error', + message: `The workflow cannot be saved due to: ${messages}`, + }); + return; + } + + setLoading(true); + + const workflowItem = await orchestratorApi.createWorkflowDefinition( + workflowEditor.workflowItem.uri, + content, + ); + if (!workflowItem?.definition.id) { + errorApi.post(new Error('Error creating workflow')); + return; + } + + alertApi.post({ + severity: 'info', + message: `Workflow ${workflowItem.definition.id} has been saved.`, + }); + navigate( + definitionLink({ + workflowId: workflowItem.definition.id, + format: workflowFormat, + }), + ); + } catch (e: any) { + errorApi.post(new Error(e)); + } finally { + setLoading(false); + } + }, + [ + workflowEditor, + errorApi, + orchestratorApi, + alertApi, + navigate, + definitionLink, + workflowFormat, + ], + ); + + return ( + + + + + {loading && } + { + workflowEditor?.getContent().then(content => { + if (content) { + handleResult(content); + } + }); + }} + > + Save + + ) + } + title={workflowId ?? `New ${WORKFLOW_TITLE}`} + > +
+ +
+
+
+
+
+ ); +}; diff --git a/plugins/orchestrator/src/components/EditWorkflowDialog.tsx b/plugins/orchestrator/src/components/EditWorkflowDialog.tsx new file mode 100644 index 0000000000..668d4a30da --- /dev/null +++ b/plugins/orchestrator/src/components/EditWorkflowDialog.tsx @@ -0,0 +1,100 @@ +import React from 'react'; + +import { alertApiRef, errorApiRef, useApi } from '@backstage/core-plugin-api'; + +import { useController } from '@kie-tools-core/react-hooks/dist/useController'; +import { Button, DialogActions } from '@material-ui/core'; + +import { orchestratorApiRef } from '../api'; +import SubmitButton from './SubmitButton'; +import { + OrchestratorWorkflowDialogProps, + WorkflowDialog, +} from './WorkflowDialog'; +import { + EditorViewKind, + WorkflowEditorRef, +} from './WorkflowEditor/WorkflowEditor'; + +const EditWorkflowDialog = ({ + name, + handleSaveSucceeded, + close, + ...props +}: Pick & { + handleSaveSucceeded: () => void; + name: string; +}) => { + const errorApi = useApi(errorApiRef); + const alertApi = useApi(alertApiRef); + const orchestratorApi = useApi(orchestratorApiRef); + const [workflowEditor, workflowEditorRef] = + useController(); + const [saving, setSaving] = React.useState(false); + + const handleSave = async () => { + if (!workflowEditor?.workflowItem) { + errorApi.post({ + name: 'Unexpected error', + message: `Workflow editor item undefined`, + }); + return; + } + try { + const notifications = await workflowEditor.validate(); + if (notifications?.length !== 0) { + const messages = notifications?.map(n => n.message).join('; '); + errorApi.post({ + name: 'Validation error', + message: `The workflow cannot be saved due to: ${messages}`, + }); + return; + } + const content = await workflowEditor?.getContent(); + if (!content) { + errorApi.post({ + name: 'Validation error', + message: `No content`, + }); + } + setSaving(true); + const workflowItem = await orchestratorApi.createWorkflowDefinition( + workflowEditor?.workflowItem?.uri, + content, + ); + alertApi.post({ + severity: 'info', + message: `Workflow ${workflowItem.definition.id} has been saved.`, + display: 'transient', + }); + handleSaveSucceeded(); + close(); + } catch (e: any) { + errorApi.post(new Error(e)); + } finally { + setSaving(false); + } + }; + + return ( + + + handleSave()} submitting={saving}> + Save + + + } + close={close} + {...props} + /> + ); +}; + +export default EditWorkflowDialog; diff --git a/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.stories.tsx b/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.stories.tsx new file mode 100644 index 0000000000..60ab2473a7 --- /dev/null +++ b/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.stories.tsx @@ -0,0 +1,103 @@ +import React from 'react'; + +import { TestApiProvider, wrapInTestApp } from '@backstage/test-utils'; + +import { Meta, StoryObj } from '@storybook/react'; + +import { WorkflowDataInputSchemaResponse } from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { fakeDataInputSchemaDifferentTypes } from '../../__fixtures__/fakeWorkflowDataInputSchemaDifferentTypes'; +import { fakeDataInputSchemaReponse } from '../../__fixtures__/fakeWorkflowDataInputSchemaResponse'; +import { fakeDataInputSchemaMultiStepReponse } from '../../__fixtures__/fakeWorkflowDataInputSchemaResponseMultiStep'; +import { orchestratorApiRef } from '../../api'; +import { MockOrchestratorClient } from '../../api/MockOrchestratorClient'; +import { orchestratorRootRouteRef } 
from '../../routes'; +import { ExecuteWorkflowPage } from './ExecuteWorkflowPage'; + +const meta = { + title: 'Orchestrator/ExecuteWorkflowPage', + component: ExecuteWorkflowPage, + decorators: [ + ( + _, + context?: { + args?: { + schemaResponse?: () => Promise; + }; + }, + ) => + wrapInTestApp( + { + // eslint-disable-next-line no-alert + alert('Execute workflow API called'); + return Promise.resolve({ id: 'dummy' }); + }, + }), + ], + ]} + > + + , + { + mountedRoutes: { + '/orchestrator': orchestratorRootRouteRef, + }, + }, + ), + ], +} satisfies Meta; + +export default meta; + +type Story = StoryObj; + +export const ExecuteWorkflowPageStory: Story = { + name: 'One step', +}; + +export const ExecuteWorkflowPageMultipleStepsStory: Story = { + name: 'Multiple steps', + args: { + schemaResponse: () => Promise.resolve(fakeDataInputSchemaMultiStepReponse), + }, +}; + +export const DifferentInputTypesStory: Story = { + name: 'Different input types', + args: { + schemaResponse: () => Promise.resolve(fakeDataInputSchemaDifferentTypes), + }, +}; + +export const ExecuteWorkflowPageNoSchemaStory: Story = { + name: 'No schema', + args: { + schemaResponse: () => ({ + ...fakeDataInputSchemaReponse, + schemas: [], + }), + }, +}; + +export const ExecuteWorkflowPageLoadingStory: Story = { + name: 'Loading', + args: { + schemaResponse: () => new Promise(() => {}), + }, +}; + +export const ExecuteWorkflowPageResponseErrorStory: Story = { + name: 'Response Error', + args: { + schemaResponse: () => Promise.reject(new Error('Testing error')), + }, +}; diff --git a/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.tsx b/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.tsx new file mode 100644 index 0000000000..71618ed02a --- /dev/null +++ b/plugins/orchestrator/src/components/ExecuteWorkflowPage/ExecuteWorkflowPage.tsx @@ -0,0 +1,133 @@ +import React, { useCallback, useState } from 'react'; +import { useNavigate } from 'react-router-dom'; +import { useAsync } from 'react-use'; + +import { + InfoCard, + Progress, + ResponseErrorPanel, +} from '@backstage/core-components'; +import { + useApi, + useRouteRef, + useRouteRefParams, +} from '@backstage/core-plugin-api'; +import { JsonValue } from '@backstage/types'; + +import { Grid } from '@material-ui/core'; + +import { WorkflowDataInputSchemaResponse } from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { orchestratorApiRef } from '../../api'; +import { + executeWorkflowRouteRef, + executeWorkflowWithBusinessKeyRouteRef, + workflowInstanceRouteRef, +} from '../../routes'; +import { getErrorObject } from '../../utils/errorUtils'; +import { BaseOrchestratorPage } from '../BaseOrchestratorPage'; +import JsonTextAreaForm from './JsonTextAreaForm'; +import StepperForm from './StepperForm'; + +export const ExecuteWorkflowPage = () => { + const orchestratorApi = useApi(orchestratorApiRef); + const { workflowId } = useRouteRefParams(executeWorkflowRouteRef); + const { businessKey } = useRouteRefParams( + executeWorkflowWithBusinessKeyRouteRef, + ); + const [isExecuting, setIsExecuting] = useState(false); + const [updateError, setUpdateError] = React.useState(); + const navigate = useNavigate(); + const instanceLink = useRouteRef(workflowInstanceRouteRef); + const { + value: schemaResponse, + loading, + error: responseError, + } = useAsync( + async (): Promise => + await orchestratorApi.getWorkflowDataInputSchema(workflowId), + [orchestratorApi, workflowId], + ); + + const handleExecute = useCallback( + 
async (getParameters: () => Record) => { + setUpdateError(undefined); + let parameters: Record = {}; + try { + parameters = getParameters(); + } catch (err) { + setUpdateError(getErrorObject(err)); + return; + } + try { + if (businessKey !== undefined) { + parameters.businessKey = businessKey; + } + setIsExecuting(true); + const response = await orchestratorApi.executeWorkflow({ + workflowId, + parameters, + }); + navigate(instanceLink({ instanceId: response.id })); + } catch (err) { + setUpdateError(getErrorObject(err)); + } finally { + setIsExecuting(false); + } + }, + [orchestratorApi, workflowId, navigate, instanceLink, businessKey], + ); + + let pageContent; + + if (loading) { + pageContent = ; + } else if (responseError) { + pageContent = ; + } else if (!schemaResponse) { + pageContent = ( + + ); + } else { + pageContent = ( + + {updateError && ( + + + + )} + + + {schemaResponse.schemas.length > 0 ? ( + + ) : ( + + )} + + + + ); + } + + return ( + + {pageContent} + + ); +}; diff --git a/plugins/orchestrator/src/components/ExecuteWorkflowPage/JsonTextAreaForm.tsx b/plugins/orchestrator/src/components/ExecuteWorkflowPage/JsonTextAreaForm.tsx new file mode 100644 index 0000000000..fc8f6a014c --- /dev/null +++ b/plugins/orchestrator/src/components/ExecuteWorkflowPage/JsonTextAreaForm.tsx @@ -0,0 +1,70 @@ +import React from 'react'; + +import { JsonValue } from '@backstage/types'; + +import { Box, Grid, useTheme } from '@material-ui/core'; +import { Alert, AlertTitle } from '@material-ui/lab'; +import { Editor } from '@monaco-editor/react'; + +import SubmitButton from '../SubmitButton'; + +const DEFAULT_VALUE = JSON.stringify({ myKey: 'myValue' }, null, 4); + +const JsonTextAreaForm = ({ + isExecuting, + handleExecute, +}: { + isExecuting: boolean; + handleExecute: ( + getParameters: () => Record, + ) => Promise; +}) => { + const [jsonText, setJsonText] = React.useState(DEFAULT_VALUE); + const theme = useTheme(); + const getParameters = (): Record => { + if (!jsonText) { + return {}; + } + const parameters = JSON.parse(jsonText); + return parameters as Record; + }; + + return ( + + + + Missing JSON Schema for Input Form. + Type the input data in JSON format below. +
+ If you prefer using a form to start the workflow, ensure a valid JSON
+ schema is provided in the 'dataInputSchema' property of your workflow
+ definition file.
+
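+ {/* Illustration only (not part of this change): a serverless workflow definition
+ references its input form schema via the top-level "dataInputSchema" property,
+ e.g. "dataInputSchema": "schemas/my-workflow-input-schema.json" or, in object
+ form, "dataInputSchema": { "schema": "schemas/my-workflow-input-schema.json",
+ "failOnValidationErrors": true }. The schema path is a hypothetical example. */}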
+ + + setJsonText(value ?? '')} + height="30rem" + theme={theme.palette.type === 'dark' ? 'vs-dark' : 'light'} + options={{ + minimap: { enabled: false }, + }} + /> + + + + handleExecute(getParameters)} + > + Run + + +
+ ); +}; + +export default JsonTextAreaForm; diff --git a/plugins/orchestrator/src/components/ExecuteWorkflowPage/StepperForm.tsx b/plugins/orchestrator/src/components/ExecuteWorkflowPage/StepperForm.tsx new file mode 100644 index 0000000000..3bfa2de9e8 --- /dev/null +++ b/plugins/orchestrator/src/components/ExecuteWorkflowPage/StepperForm.tsx @@ -0,0 +1,190 @@ +import React from 'react'; + +import { Content, StructuredMetadataTable } from '@backstage/core-components'; +import { JsonValue } from '@backstage/types'; + +import { + Box, + Button, + Paper, + Step, + StepContent, + StepLabel, + Stepper, + Typography, +} from '@material-ui/core'; +import { FormProps, withTheme } from '@rjsf/core-v5'; +import { Theme as MuiTheme } from '@rjsf/material-ui-v5'; +import { UiSchema } from '@rjsf/utils'; +import validator from '@rjsf/validator-ajv8'; +import { JSONSchema7 } from 'json-schema'; + +import SubmitButton from '../SubmitButton'; + +const MuiForm = withTheme>(MuiTheme); + +const ReviewStep = ({ + busy, + formDataObjects, + handleBack, + handleReset, + handleExecute, +}: { + busy: boolean; + formDataObjects: Record[]; + handleBack: () => void; + handleReset: () => void; + handleExecute: () => void; +}) => { + const combinedFormData = React.useMemo( + () => + formDataObjects.reduce>( + (prev, cur) => ({ ...prev, ...cur }), + {}, + ), + [formDataObjects], + ); + return ( + + + Review and run + + + + + + Run + + + + ); +}; + +const FormWrapper = ({ + formData, + schema, + onSubmit, + children, +}: Pick< + FormProps>, + 'formData' | 'schema' | 'onSubmit' | 'children' +>) => { + const firstKey = Object.keys(schema?.properties || {})[0]; + const uiSchema: UiSchema> | undefined = firstKey + ? { [firstKey]: { 'ui:autofocus': 'true' } } + : undefined; + return ( + + {children} + + ); +}; + +const StepperForm = ({ + refSchemas, + handleExecute, + isExecuting, +}: { + refSchemas: JSONSchema7[]; + handleExecute: ( + getParameters: () => Record, + ) => Promise; + isExecuting: boolean; +}) => { + const [activeStep, setActiveStep] = React.useState(0); + const handleBack = () => setActiveStep(activeStep - 1); + + const [formDataObjects, setFormDataObjects] = React.useState< + Record[] + >([]); + + const getFormData = () => + formDataObjects.reduce>( + (prev, curFormObject) => ({ ...prev, ...curFormObject }), + {}, + ); + + const resetFormDataObjects = React.useCallback( + () => + setFormDataObjects( + refSchemas.reduce[]>( + prev => [...prev, {}], + [], + ), + ), + [refSchemas], + ); + + React.useEffect(() => { + resetFormDataObjects(); + }, [resetFormDataObjects]); + + return ( + <> + + {refSchemas.map((schema, index) => ( + + + + {schema.title} + + + + { + const newDataObjects = [...formDataObjects]; + newDataObjects.splice(index, 1, e.formData ?? 
{}); + setFormDataObjects(newDataObjects); + setActiveStep(activeStep + 1); + }} + > + + + + + + ))} + + {activeStep === refSchemas.length && ( + { + resetFormDataObjects(); + setActiveStep(0); + }} + busy={isExecuting} + handleExecute={() => handleExecute(() => getFormData())} + /> + )} + + ); +}; + +export default StepperForm; diff --git a/plugins/orchestrator/src/components/NewWorkflowViewerPage.tsx b/plugins/orchestrator/src/components/NewWorkflowViewerPage.tsx new file mode 100644 index 0000000000..a2b5b0fa58 --- /dev/null +++ b/plugins/orchestrator/src/components/NewWorkflowViewerPage.tsx @@ -0,0 +1,193 @@ +import React, { useCallback } from 'react'; +import { useForm, UseFormRegisterReturn } from 'react-hook-form'; +import { useNavigate } from 'react-router-dom'; + +import { InfoCard } from '@backstage/core-components'; +import { errorApiRef, useApi, useRouteRef } from '@backstage/core-plugin-api'; + +import { + Box, + Button, + Divider, + FormControl, + Grid, + TextField, + Typography, + useMediaQuery, + useTheme, +} from '@material-ui/core'; + +import { + WORKFLOW_JSON_SAMPLE, + WORKFLOW_YAML_SAMPLE, + WorkflowFormat, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { orchestratorApiRef } from '../api'; +import { createWorkflowRouteRef, workflowDefinitionsRouteRef } from '../routes'; +import { BaseOrchestratorPage } from './BaseOrchestratorPage'; + +type FormData = { + url: string; +}; + +export const NewWorkflowViewerPage = () => { + const theme = useTheme(); + const isMobile = useMediaQuery(theme.breakpoints.down('sm')); + const errorApi = useApi(errorApiRef); + const orchestratorApi = useApi(orchestratorApiRef); + const createWorkflowLink = useRouteRef(createWorkflowRouteRef); + + const defaultValues: FormData = { + url: WORKFLOW_JSON_SAMPLE.url, + }; + const { handleSubmit, register, formState } = useForm({ + defaultValues, + mode: 'onChange', + }); + + const { errors } = formState; + + const navigate = useNavigate(); + const definitionLink = useRouteRef(workflowDefinitionsRouteRef); + + const handleResult = useCallback( + async ({ url }: FormData) => { + if (!url) { + return; + } + try { + const result = await orchestratorApi.createWorkflowDefinition(url); + + if (!result?.definition.id) { + errorApi.post(new Error('error importing workflow')); + } else { + const workflowFormat = result.uri.endsWith('.json') ? 'json' : 'yaml'; + navigate( + definitionLink({ + workflowId: result.definition.id, + format: workflowFormat, + }), + ); + } + } catch (e: any) { + errorApi.post(new Error(e)); + } + }, + [orchestratorApi, errorApi, navigate, definitionLink], + ); + + const newWorkflow = useCallback( + (format: WorkflowFormat) => { + navigate( + createWorkflowLink({ + format, + }), + ); + }, + [createWorkflowLink, navigate], + ); + + function asInputRef(renderResult: UseFormRegisterReturn) { + const { ref, ...rest } = renderResult; + return { + inputRef: ref, + ...rest, + }; + } + + const contentItems = [ + + + + + + , + + + + +
+ + + + +
+
+ + + + + + or from a Sample + + + + +
+
+
, + ]; + + return ( + + + {isMobile ? contentItems : [...contentItems].reverse()} + + + ); +}; diff --git a/plugins/orchestrator/src/components/OrchestratorPage.stories.tsx b/plugins/orchestrator/src/components/OrchestratorPage.stories.tsx new file mode 100644 index 0000000000..1357998b2f --- /dev/null +++ b/plugins/orchestrator/src/components/OrchestratorPage.stories.tsx @@ -0,0 +1,96 @@ +import React from 'react'; +import { Route, Routes } from 'react-router-dom'; + +import { featureFlagsApiRef } from '@backstage/core-plugin-api'; +import { TestApiProvider, wrapInTestApp } from '@backstage/test-utils'; + +import { Meta, StoryObj } from '@storybook/react'; + +import { + FEATURE_FLAG_DEVELOPER_MODE, + WorkflowOverview, +} from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { createFakeFeatureFlagsApi } from '../__fixtures__/fakeFeatureFlagsApi'; +import { fakeProcessInstances } from '../__fixtures__/fakeProcessInstance'; +import { fakeWorkflowItem } from '../__fixtures__/fakeWorkflowItem'; +import { fakeWorkflowOverviewList } from '../__fixtures__/fakeWorkflowOverviewList'; +import { fakeWorkflowSpecs } from '../__fixtures__/fakeWorkflowSpecs'; +import { orchestratorApiRef } from '../api'; +import { MockOrchestratorClient } from '../api/MockOrchestratorClient'; +import { orchestratorRootRouteRef } from '../routes'; +import { OrchestratorPage } from './OrchestratorPage'; + +/** This component is used in order to correctly render nested components using the `TabbedLayout.Route` component. */ +const TestRouter: React.FC> = ({ children }) => ( + + {children}} /> + +); + +const meta = { + title: 'Orchestrator/next/OrchestratorPage', + component: OrchestratorPage, + decorators: [ + ( + Story, + context: { + args: { + items?: WorkflowOverview[]; + api?: MockOrchestratorClient; + featureFlags?: string[]; + }; + }, + ) => { + const items = context.args.items || fakeWorkflowOverviewList; + const mockApi = new MockOrchestratorClient({ + getInstancesResponse: Promise.resolve(fakeProcessInstances), + listWorkflowsOverviewResponse: Promise.resolve({ + limit: 0, + offset: 0, + totalCount: 0, + items, + }), + getWorkflowResponse: Promise.resolve(fakeWorkflowItem), + getSpecsResponse: Promise.resolve(fakeWorkflowSpecs), + }); + return wrapInTestApp( + + + + + , + { + mountedRoutes: { + '/orchestrator': orchestratorRootRouteRef, + }, + }, + ); + }, + ], +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const OrchestratorPageStory: Story = { + name: 'Sample 1', + args: { + items: fakeWorkflowOverviewList.slice(0, 3), + }, +}; + +export const EditMode: Story = { + name: 'Edit mode', + args: { + featureFlags: FEATURE_FLAG_DEVELOPER_MODE, + }, +}; diff --git a/plugins/orchestrator/src/components/OrchestratorPage.tsx b/plugins/orchestrator/src/components/OrchestratorPage.tsx new file mode 100644 index 0000000000..beb2c68884 --- /dev/null +++ b/plugins/orchestrator/src/components/OrchestratorPage.tsx @@ -0,0 +1,26 @@ +import React from 'react'; + +import { TabbedLayout } from '@backstage/core-components'; + +import { workflowInstancesRouteRef } from '../routes'; +import { BaseOrchestratorPage } from './BaseOrchestratorPage'; +import { WorkflowRunsTabContent } from './WorkflowRunsTabContent'; +import { WorkflowsTabContent } from './WorkflowsTabContent'; + +export const OrchestratorPage = () => { + return ( + + + + + + + + + + + ); +}; diff --git a/plugins/orchestrator/src/components/Paragraph.tsx b/plugins/orchestrator/src/components/Paragraph.tsx new file mode 100644 index 
0000000000..f2b8921a3c --- /dev/null +++ b/plugins/orchestrator/src/components/Paragraph.tsx @@ -0,0 +1,18 @@ +import React, { PropsWithChildren } from 'react'; + +import { Typography } from '@material-ui/core'; +import { Variant } from '@material-ui/core/styles/createTypography'; + +export const Paragraph = ( + props: PropsWithChildren<{ variant?: Variant | 'inherit' }>, +) => { + return ( + + {props.children} + + ); +}; diff --git a/plugins/orchestrator/src/components/Router.tsx b/plugins/orchestrator/src/components/Router.tsx new file mode 100644 index 0000000000..f691e9a48c --- /dev/null +++ b/plugins/orchestrator/src/components/Router.tsx @@ -0,0 +1,70 @@ +import React from 'react'; +import { Route, Routes } from 'react-router-dom'; + +import { FeatureFlagged } from '@backstage/core-app-api'; + +import { FEATURE_FLAG_DEVELOPER_MODE } from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { + createWorkflowRouteRef, + editWorkflowRouteRef, + executeWorkflowRouteRef, + executeWorkflowWithBusinessKeyRouteRef, + newWorkflowRef, + workflowDefinitionsRouteRef, + workflowInstanceRouteRef, +} from '../routes'; +import { CreateWorkflowPage } from './CreateWorkflowPage'; +import { ExecuteWorkflowPage } from './ExecuteWorkflowPage/ExecuteWorkflowPage'; +import { NewWorkflowViewerPage } from './NewWorkflowViewerPage'; +import { OrchestratorPage } from './OrchestratorPage'; +import { WorkflowDefinitionViewerPage } from './WorkflowDefinitionViewerPage'; +import { WorkflowInstancePage } from './WorkflowInstancePage'; + +export const Router = () => { + return ( + + } /> + } + /> + } + /> + + + + } + /> + + + + } + /> + + + + } + /> + } + /> + } + /> + + ); +}; diff --git a/plugins/orchestrator/src/components/ScaffolderTemplateCard/OrchestratorScaffolderTemplateCard.tsx b/plugins/orchestrator/src/components/ScaffolderTemplateCard/OrchestratorScaffolderTemplateCard.tsx new file mode 100644 index 0000000000..8d971158a0 --- /dev/null +++ b/plugins/orchestrator/src/components/ScaffolderTemplateCard/OrchestratorScaffolderTemplateCard.tsx @@ -0,0 +1,37 @@ +import React, { useCallback } from 'react'; +import { useNavigate } from 'react-router-dom'; + +import { useRouteRef } from '@backstage/core-plugin-api'; +import { TemplateEntityV1beta3 } from '@backstage/plugin-scaffolder-common'; +import { + TemplateCard, + TemplateCardProps, +} from '@backstage/plugin-scaffolder-react/alpha'; + +import { WORKFLOW_TYPE } from '@janus-idp/backstage-plugin-orchestrator-common'; + +import { executeWorkflowRouteRef } from '../../routes'; + +export const OrchestratorScaffolderTemplateCard = ( + props: TemplateCardProps, +) => { + const { onSelected } = props; + const navigate = useNavigate(); + const executeWorkflowLink = useRouteRef(executeWorkflowRouteRef); + + const onSelectedExtended = useCallback( + (template: TemplateEntityV1beta3) => { + const isWorkflow = template.spec.type === WORKFLOW_TYPE; + + if (!isWorkflow) { + onSelected?.(template); + return; + } + + navigate(executeWorkflowLink({ workflowId: template.metadata.name })); + }, + [executeWorkflowLink, navigate, onSelected], + ); + + return ; +}; diff --git a/plugins/orchestrator/src/components/ScaffolderTemplateCard/index.ts b/plugins/orchestrator/src/components/ScaffolderTemplateCard/index.ts new file mode 100644 index 0000000000..7b2665ad89 --- /dev/null +++ b/plugins/orchestrator/src/components/ScaffolderTemplateCard/index.ts @@ -0,0 +1 @@ +export { OrchestratorScaffolderTemplateCard } from './OrchestratorScaffolderTemplateCard'; diff --git 
a/plugins/orchestrator/src/components/Selector.tsx b/plugins/orchestrator/src/components/Selector.tsx new file mode 100644 index 0000000000..cb8aef8f3a --- /dev/null +++ b/plugins/orchestrator/src/components/Selector.tsx @@ -0,0 +1,68 @@ +import React from 'react'; + +import { Select } from '@backstage/core-components'; + +import { makeStyles, Typography } from '@material-ui/core'; + +const useStyles = makeStyles(theme => ({ + root: { + display: 'flex', + alignItems: 'baseline', + '& label + div': { + marginTop: '0px', + }, + '& select': { + width: '7rem', + }, + }, + label: { + color: theme.palette.text.primary, + fontSize: theme.typography.fontSize, + paddingRight: '0.5rem', + fontWeight: 'bold', + }, +})); + +const ALL_ITEMS = '___all___'; + +type BackstageSelectProps = Parameters[0]; +export type SelectorProps = Omit & { + includeAll?: boolean; + onChange: (item: string) => void; +}; + +export const Selector = ({ + includeAll = true, + ...otherProps +}: SelectorProps) => { + const styles = useStyles(); + + const selectItems = React.useMemo( + () => + includeAll + ? [{ label: 'All', value: ALL_ITEMS }, ...otherProps.items] + : otherProps.items, + [includeAll, otherProps.items], + ); + + const handleChange = React.useCallback( + item => otherProps.onChange(item as string), + [otherProps], + ); + + return ( +
+ {otherProps.label} +