From 9edddbc85f23dd0cbfb2ad5fee15b1e6f421e021 Mon Sep 17 00:00:00 2001 From: sakshi godspeed Date: Tue, 29 Oct 2024 16:56:07 +0530 Subject: [PATCH] events --- docs/microservices-framework/CRUD_API.md | 5 +- .../create-custom-event-source.md | 2 +- .../event-sources/event-schema.md | 107 ++----- .../event-source-plugins/Kafka Eventsource.md | 15 +- .../event-sources/event-types/cron-events.md | 2 +- .../event-types/graphql-events.md | 28 +- .../event-sources/event-types/kafka-events.md | 12 +- .../event-sources/event-types/overview.md | 17 +- .../event-sources/events-overview.md | 7 + .../event-sources/overview.md | 4 +- .../guide/get-started.md | 72 +++-- sidebars.js | 269 +++++++++--------- static/setup.bat | 34 +++ static/setup.sh | 67 +++++ 14 files changed, 341 insertions(+), 300 deletions(-) create mode 100644 docs/microservices-framework/event-sources/events-overview.md create mode 100644 static/setup.bat create mode 100644 static/setup.sh diff --git a/docs/microservices-framework/CRUD_API.md b/docs/microservices-framework/CRUD_API.md index c5084c6..f3e860a 100644 --- a/docs/microservices-framework/CRUD_API.md +++ b/docs/microservices-framework/CRUD_API.md @@ -8,8 +8,7 @@ The gen-crud-api command in Godspeed is a powerful tool that automatically gener The framework generates CRUD API using Prisma's database model files and ORM client. It uses Godspeed's [Prisma plugin](./datasources/datasource-plugins/Prisma%20Datasource.md) as the ORM and generates **http** eventsource based CRUD APIs by default. **Currently supported eventsources:** -- Http eventsource: [Express](./event-sources/event-source-plugins/Express%20Http%20Eventsource.md) -- Http eventsource: [Fastify](./event-sources/event-source-plugins/Fastify%20Eventsource.md) +- Http eventsources: [Express](./event-sources/event-source-plugins/Express%20Http%20Eventsource.md), [Fastify](./event-sources/event-source-plugins/Fastify%20Eventsource.md) - Graphql eventsource: [Apollo Graphql](./event-sources/event-source-plugins/Apollo%20GraphQl%20Eventsource.md) ### Steps to generate CRUD API over REST and Graphql @@ -75,7 +74,7 @@ If your schema name is **lms.prisma**, your file content should look like this. 4.1 If you already have an existing database, you can [introspect it](https://www.prisma.io/docs/getting-started/setup-prisma/add-to-existing-project/relational-databases/introspection-typescript-postgresql) and generate the Prisma model file using `prisma db pull`. This will generate your .prisma file. - 4.2 Copy the generated file to `src/datasources` folder and rename it as per the name of this datasource that you want to keep. If you don't have an existing database setup with a model, then create a prisma model file from scratch. --> + 4.2 Copy the generated file to `src/datasources` folder and rename it as per the name of this datasource that you want to keep. If you don't have an existing database setup with a model, then create a prisma model file from scratch. 4.3 Make sure to note the `output` parameter in the .prisma file which should point to location in `src/datasources/prisma-clients/` and `previewFeatures` is to be added in case you want to generate metrics for prisma queries for telemetry. 
diff --git a/docs/microservices-framework/event-sources/create-custom-event-source.md b/docs/microservices-framework/event-sources/create-custom-event-source.md index 800ae07..99c6687 100644 --- a/docs/microservices-framework/event-sources/create-custom-event-source.md +++ b/docs/microservices-framework/event-sources/create-custom-event-source.md @@ -1,4 +1,4 @@ -# Create a Custom Eventsource. +# Create a Custom Eventsource ## About Eventsources diff --git a/docs/microservices-framework/event-sources/event-schema.md b/docs/microservices-framework/event-sources/event-schema.md index 2d98f9e..a13574f 100644 --- a/docs/microservices-framework/event-sources/event-schema.md +++ b/docs/microservices-framework/event-sources/event-schema.md @@ -1,12 +1,9 @@ # Event Schema -In the meta-framework world, we call sync events (different APIs) and async events (ex. Kafka, Socket, Cron) as events altogether. - -An event schema defines -- The structured format or blueprint for representing data within an event -- Authentication and authorization policy -- Input and output validations -- The event handler - the business logic for handling that event -- The documentation of the event (for publishing the API spec) +The event schema, for each eventsource, closely follows the OpenAPI specification. It includes +- The name/topic/URL of the event +- The event handler workflow(fn) +- Input and output schema with the validation error handling +- [Authorization](/docs/microservices-framework/authorization/overview.md) checks It outlines the specific fields, data types, and structure that an event must adhere to. The schema serves as a standardized template, ensuring consistency in the implementation across projects in a company, whereby many kinds of eventsources are used. @@ -44,16 +41,24 @@ Lets understand the first line from the above snippet `http.get./greet`. `http`: Protocol http eventsource (can be any) -`get` : method (depends on the eventsource used. Can be topic for Kafka, for example.) +`get` : method (depends on the eventsource used. Can be topic for Kafka) `/helloworld`: endpoint (In case of http and graphql sources. Can be groupId in case of Kafka for ex.) We are exposing an endpoint with a `get` method on `http` protocol. This endpoint is calling an eventhandler called `helloworld` in the second line. Event handlers can be functions written in typescript, javascript or yaml workflows in Godspeed's DSL format. In the above example the helloworld function exists in `src/functions` directory. +:::tip Note +When switching between eventsources, the event schema undergoes significant changes. In the case of HTTP events, the start line includes the eventsource name, method, and path. However, for Kafka events, the start line combines the datasource name, topic name, and group ID. +::: -## Http +Points to be undertaken : +- The first line is changed for each protocol. +- You can apply multiple compatible eventsource instances in a URI for ex. `graphql & http.get./greeting` +- Async consumers like Kafka dont need authentication or authorization, and don't have a response +- Async events like Cron do not have any input. -### Example HTTP Schema +
+ Example HTTP Schema ```yaml http.get./greet: #The initial line depicts a fusion of the event, the employed method, and the path associated with the event. @@ -76,14 +81,16 @@ http.get./greet: #The initial line depicts a fusion of the event, the employed m content: application/json: schema: - type: string + type: object 200: content: application/json: schema: type: object ``` -**To get a quick understanding of HTTP event scehma, please watch the video provided below…** +
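The schema above only references its event handler by name. As a rough illustration, a TypeScript handler for this event could live in `src/functions` and look like the sketch below. The file name `greet.ts`, the `name` query parameter and the response shape are assumptions for illustration only; `GSContext`/`GSStatus` are the usual exports of `@godspeedsystems/core` — check the workflows section for the authoritative signature.

```typescript
// src/functions/greet.ts — a minimal sketch of an event handler (illustrative names).
import { GSContext, GSStatus } from "@godspeedsystems/core";

export default function (ctx: GSContext) {
  // Query params, body and headers of the incoming event are assumed to be
  // exposed on ctx.inputs.data for http events.
  const name = ctx.inputs?.data?.query?.name || "World";

  // success flag, status code, message, data — the code and data returned here
  // correspond to the `responses` section declared in the event schema above.
  return new GSStatus(true, 200, undefined, { message: `Hello ${name}!` });
}
```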
+ +**To get a quick understanding of Event schema, please watch the video provided below…**
@@ -93,77 +100,3 @@ http.get./greet: #The initial line depicts a fusion of the event, the employed m
--> -## Kafka - -The structure of Kafka event schema - -> A [Kafka](https://github.com/godspeedsystems/gs-plugins/tree/main/plugins/kafka-as-datasource-as-eventsource#godspeed-plugin-kafka-as-datasource-as-eventsource) event is specified as `{datasourceName}.{topic_name}.{group_id}` in [the Kafka event specification](#example-spec-for-kafka-event). - -Within the Kafka event structure, the content of the message is captured and made accessible as `inputs.body`, facilitating its integration into the handler workflow for processing. - -### Example spec for Kafka event - -``` yaml - # event for consume data from Topic -Kafka.publish-producer1.kafka_proj: // event key - id: kafka_consumer - fn: kafka_consume - body: - description: The body of the query - content: - application/json: - schema: - type: string - ``` - -## Apollo Graphql - -### GraphQL Configuration -The Apollo Graphql plugin is currently configured and functions exactly the same as Express and Fastify eventsources. Except that it doesn't have swagger config and doesn't support file upload as of now. - -(src/eventsources/apollo.yaml) -```yaml -type: graphql -port: 4000 -#eventsource level default settings (can be overriden at event level) -authn: -authz: -on_request_validation_error: -on_response_validation_error: -``` - -:::tip note -Ensure the event key prefix aligns with the name of the configuration YAML file. In this example, the prefix for the Event key is Apollo. The event schema follows REST standards, resembling HTTP events. -::: - -### Apollo Graphql event schema - -(src/events/create_category.yaml) -```yaml -apollo.post./mongo/category: - summary: Create a new Category - description: Create Category from the database - fn: create - body: - content: - application/json: - schema: - type: object - properties: - name: - type: string - responses: - content: - application/json: - schema: - type: object -``` - -:::tip note -- The first line is changed for each protocol. -- You can apply multiple compatible eventsource instances in a URI for ex. `graphql & http.get./greeting` -- Async consumers like Kafka dont need authentication or authorization, and don't have a response -- Async Cron does not have any input either, unlike Kafka. 
- -- Two types of events- sync([http](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/express-as-http/README.md),[Apollo Graphql](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/graphql-as-eventsource/README.md)) and async([cron](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/cron-as-eventsource/README.md),[kafka](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/kafka-as-datasource-as-eventsource/README.md)) -::: \ No newline at end of file diff --git a/docs/microservices-framework/event-sources/event-source-plugins/Kafka Eventsource.md b/docs/microservices-framework/event-sources/event-source-plugins/Kafka Eventsource.md index 55d8e55..0fc2a27 100644 --- a/docs/microservices-framework/event-sources/event-source-plugins/Kafka Eventsource.md +++ b/docs/microservices-framework/event-sources/event-source-plugins/Kafka Eventsource.md @@ -57,20 +57,11 @@ In the event, we establish an HTTP endpoint that accepts parameters such as the message: type: string required: ['message'] - responses: - 200: - content: - application/json: - schema: - type: object - properties: - name: - type: string ``` #### kafka workflow for Producer ( src/functions/kafka-publish.yaml ) -In workflow we need to mension `datasource.kafka.producer` as function (fn) to Produce data. +In workflow we need to mension `datasource.kafka.producer` as function (fn) to produce data. ```yaml id: kafka-publish @@ -80,7 +71,7 @@ tasks: fn: datasource.kafka.producer args: topic: "publish-producer1" - message: <% inputs.body.message%> + message: <% inputs.body.message %> ``` ### Example usage EventSource (Consumer): @@ -103,7 +94,7 @@ To use Consumer we need to follow the below event key format. The consumer event is triggered whenever a new message arrives on the specified topic. Upon triggering, it retrieves the incoming message and forwards it to the `kafka_consume` function. Inside this function, the incoming message is processed, and the result is then returned. ``` yaml -# event for consume data from Topic +# event to consume data from Topic kafka.publish-producer1.kafka_proj: // event key id: kafka__consumer fn: kafka_consume diff --git a/docs/microservices-framework/event-sources/event-types/cron-events.md b/docs/microservices-framework/event-sources/event-types/cron-events.md index b6b4095..39af0e1 100644 --- a/docs/microservices-framework/event-sources/event-types/cron-events.md +++ b/docs/microservices-framework/event-sources/event-types/cron-events.md @@ -12,7 +12,7 @@ ( src/events/every_minute_task.yaml ) ```yaml # event for Scheduling a task for every minute. -cron.* * * * *.Asia/Kolkata: //event key +cron.* * * * *.Asia/Kolkata: # event key fn: every_minute ``` diff --git a/docs/microservices-framework/event-sources/event-types/graphql-events.md b/docs/microservices-framework/event-sources/event-types/graphql-events.md index e68c333..c9e9e3e 100644 --- a/docs/microservices-framework/event-sources/event-types/graphql-events.md +++ b/docs/microservices-framework/event-sources/event-types/graphql-events.md @@ -1,35 +1,36 @@ -# Graphql Event +# Apollo Graphql Event -- The GraphQL event configuration in Godspeed allows seamless integration of GraphQL APIs, emphasizing simplicity and efficiency in API development. The configuration file (Apollo.yaml) specifies the GraphQL type and port, ensuring alignment with the event key prefix. 
+The GraphQL event configuration in Godspeed allows seamless integration of GraphQL APIs, emphasizing simplicity and efficiency in API development. The configuration file (Apollo.yaml) specifies the GraphQL type and port, ensuring alignment with the event key prefix. ### GraphQL Configuration +The Apollo Graphql plugin is currently configured exactly the same as Express and Fastify eventsources. Except that it doesn't have swagger config and doesn't support file upload as of now. -(src/eventsources/Apollo.yaml) +(src/eventsources/apollo.yaml) ```yaml + type: graphql port: 4000 +# eventsource level default settings (can be overriden at event level) +authn: +authz: +on_request_validation_error: +on_response_validation_error: + ``` + :::tip note -Ensure the event key prefix aligns with the name of the configuration YAML file. In this example, the prefix for the Event key is Apollo. The event schema follows REST standards, resembling HTTP events. +Ensure the event key prefix aligns with the name of the configuration YAML file. In this example, the prefix for the Event key is `apollo` as per the yaml file name (src/eventsources/apollo.yaml). The event schema follows REST standards, resembling HTTP events. ::: ### GraphQL Event (src/events/create_category.yaml) ```yaml -graphql.post./mongo/category: +apollo.post./mongo/category: // event key having prefix apollo summary: Create a new Category description: Create Category from the database fn: create body: - content: - application/json: - schema: - type: object - properties: - name: - type: string - responses: content: application/json: schema: @@ -56,3 +57,4 @@ tasks: ::: This configuration emphasizes the simplicity of implementing GraphQL within the Godspeed framework, promoting efficiency and clarity in API development. + diff --git a/docs/microservices-framework/event-sources/event-types/kafka-events.md b/docs/microservices-framework/event-sources/event-types/kafka-events.md index 5c3d5bf..fb0d770 100644 --- a/docs/microservices-framework/event-sources/event-types/kafka-events.md +++ b/docs/microservices-framework/event-sources/event-types/kafka-events.md @@ -7,10 +7,18 @@ Within the Kafka event structure, the content of the message is captured and mad ``` yaml # event for consume data from Topic -Kafka.publish-producer1.kafka_proj: // event key +Kafka.publish-producer1.kafka_proj: # event key id: kafka_consumer fn: kafka_consume - body: #same body structure for all the events + body: + content: + application/json: + schema: + type: object + properties: + message: # the content of the message is captured here + type: string + required: ['message'] ``` #### Example workflow consuming a Kafka event diff --git a/docs/microservices-framework/event-sources/event-types/overview.md b/docs/microservices-framework/event-sources/event-types/overview.md index dd68eba..59e7105 100644 --- a/docs/microservices-framework/event-sources/event-types/overview.md +++ b/docs/microservices-framework/event-sources/event-types/overview.md @@ -1,19 +1,4 @@ -# Events and Types -## Introduction -In the realm of microservices architecture, events serve as the lifeblood of communication and coordination. Microservices can be configured to consume events from various sources, such as HTTP endpoints and messaging systems like Kafka. These events are meticulously defined, following the OpenAPI specification, and encapsulate critical information, including event names, sources, and workflow details. 
- -**We closely follow the OpenAPI specification; this is a fundamental aspect of all events that adhere to a [standard structure](/docs/microservices-framework/introduction/design-principles.md#schema-driven-development), which is one of the core design principles of Godspeed, regardless of their source or protocol.** - - - -The event schema, for each eventsource, closely follows the OpenAPI specification. It includes -- The name/topic/URL of the event -- The event handler workflow(fn) -- Input and output schema with the validation error handling -- [Authorization](/docs/microservices-framework/authorization/overview.md) checks - - -## Event types +# Event types Based on how processing is handled ,events can be classified into two types: synchronous (sync) and asynchronous (async) events, each suited for various protocols. diff --git a/docs/microservices-framework/event-sources/events-overview.md b/docs/microservices-framework/event-sources/events-overview.md new file mode 100644 index 0000000..c0b7ce7 --- /dev/null +++ b/docs/microservices-framework/event-sources/events-overview.md @@ -0,0 +1,7 @@ +# Introduction To Events +In the realm of microservices architecture, events serve as the lifeblood of communication and coordination. Microservices can be configured to consume events from various sources, such as HTTP endpoints and messaging systems like Kafka. These events are meticulously defined, following the OpenAPI specification, and encapsulate critical information, including event names, sources, and workflow details. + +In the meta-framework world, we call all types of sync and async events (ex. Kafka, Socket, Cron) as events. +- Two types of events- sync ([http](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/express-as-http/README.md), [Apollo Graphql](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/graphql-as-eventsource/README.md)) and async ([cron](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/cron-as-eventsource/README.md), [kafka](https://github.com/godspeedsystems/gs-plugins/blob/main/plugins/kafka-as-datasource-as-eventsource/README.md)) + +**We closely follow the OpenAPI specification; this is a fundamental aspect of all events that adhere to a [standard structure](/docs/microservices-framework/introduction/design-principles.md#schema-driven-development), which is one of the core design principles of Godspeed, regardless of their source or protocol.** diff --git a/docs/microservices-framework/event-sources/overview.md b/docs/microservices-framework/event-sources/overview.md index 6255884..09492fb 100644 --- a/docs/microservices-framework/event-sources/overview.md +++ b/docs/microservices-framework/event-sources/overview.md @@ -5,7 +5,9 @@ title: Eventsources Eventsources in Godspeed framework captures event and allows you to define entry or trigger points of application. For ex. the `type: express` eventsource will allow you to expose your application through REST API or a `type: cron` eventsource will allow to schedule a recurring call to a workflow. - The eventsources listen on the incoming events. They process incoming event as per the middleware set by you, including [authentication](../authentication/overview.md). Finally, they transform it to Godspeed's standard `GSCloudEvent` object, which is then made available to the event handlers and subsequent child workflows. 
To have a look at supported eventsources and understanding their implementation, refer [Godspeed's gs-plugins mono-repo](https://github.com/godspeedsystems/gs-plugins). For ex. [Kafka](https://github.com/godspeedsystems/gs-plugins/tree/main/plugins/kafka-as-datasource-as-eventsource#godspeed-plugin-kafka-as-datasource-as-eventsource)** + The eventsources listen on the incoming events. They process incoming event as per the middleware set by you, including [authentication](../authentication/overview.md). Finally, they transform it to Godspeed's standard `GSCloudEvent` object, which is then made available to the event handlers and subsequent child workflows. + + To have a look at supported eventsources and understanding their implementation, refer [Godspeed's gs-plugins mono-repo](https://github.com/godspeedsystems/gs-plugins). For ex. [Kafka](https://github.com/godspeedsystems/gs-plugins/tree/main/plugins/kafka-as-datasource-as-eventsource#godspeed-plugin-kafka-as-datasource-as-eventsource)** ## Types of eventsources diff --git a/docs/microservices-framework/guide/get-started.md b/docs/microservices-framework/guide/get-started.md index d064088..562f026 100644 --- a/docs/microservices-framework/guide/get-started.md +++ b/docs/microservices-framework/guide/get-started.md @@ -1,23 +1,58 @@ # Getting Starting with Godspeed +**This guide will walk you through:** +- Installing the Godspeed Meta-Framework on your system. +- Creating your first project and running it locally. -In this section, you will learn how to -1. Install the meta-framework using the command line interface (CLI) -2. Create a new project in godspeed -3. Open your API endpoint in Swagger UI and test it out +:::tip Need Help? +**[Ask Godspeed GPT First!](https://chatgpt.com/g/g-Zsh9qhlGw-vishwakarma)** +Whether you're having trouble with setup, configurations or understanding the framework, try asking [Godspeed GPT](https://chatgpt.com/g/g-Zsh9qhlGw-vishwakarma) +::: ### Pre-requisites: - 1. Nodejs v18 (or higher) or Bunjs 2. Npm -2. Git -3. VS Code or any code editor -3. Linux, Mac, Windows and other OS supporting Nodejs or Bunjs ---- -This guide walks you through the installation of Godspeed, and how to create and run your first project using the `create` command. It also provides troubleshooting solutions for common errors. -### **Step 1: Install Godspeed** +3. Git +4. VS Code or any code editor + +### **Step 1: Install Godspeed** +:::tip +To install prerequisites and Godspeed through our Easy Installation Script, Download it from the link provided below: +::: +- [setup.bat](../../../static/setup.bat) (for Windows) +- [setup.sh](../../../static/setup.sh) (for Ubuntu) + +It simplifies the installation process by checking all required tools in one go. + +
+ See How to execute this script in Windows + +1. Run Command Prompt as Administrator. + +2. Use cd command to change the directory to where you downloaded the setup.bat file. + +3. Execute the script by writing its name. +``` + setup.bat +``` +
+ +
+ See How to execute this script in Ubuntu + + After downloading setup.sh file, Just execute it from shell as: + +``` + sudo bash setup.sh +``` +
+ + +Once the script finishes, you are ready to **[Create Your First Project](/docs/microservices-framework/guide/get-started#step-2-create-your-first-project)** + +### To Install godspeed framework manually follow the steps given below: 1. **Ensure Node.js, Npm and Git is installed**: - - Verify versions by running the following commands: + - Verify versions by running the following commands from terminal: ```bash node -v npm -v @@ -36,7 +71,7 @@ This guide walks you through the installation of Godspeed, and how to create and godspeed --version ``` --- -### **Step 2: Create Your First Godspeed Project** +### **Step 2: Create Your First Project** 1. **Create a new Godspeed project**: - Use the `create` command to set up a new project: @@ -51,10 +86,9 @@ This guide walks you through the installation of Godspeed, and how to create and cd my_new_project ``` 3. **Start the server**: - - Start the project with this command: - ```bash + ``` godspeed serve - ``` + ``` Check the logs. They should indicate that the **Express server** is running on **port 3000**. Example log: ```bash @@ -221,11 +255,6 @@ There is a longer and detailed introduction video as well, below on this page. -### Some important commands - - - - > If you want some pre-made examples please check the [examples repository](https://github.com/godspeedsystems/gs-node-templates) @@ -237,4 +266,3 @@ There is a longer and detailed introduction video as well, below on this page. - diff --git a/sidebars.js b/sidebars.js index 9a2b8e9..766af90 100644 --- a/sidebars.js +++ b/sidebars.js @@ -102,106 +102,48 @@ const sidebars = { }, { type: "doc", - label: "5.2. Event Schema", - id: "microservices-framework/event-sources/event-schema", - }, - - { - type: "category", - label: "5.3. Event Types", - items: [ - { - type: "doc", - label: "5.3.1. Overview", - id: "microservices-framework/event-sources/event-types/overview", - }, - { - type: "doc", - label: "5.3.2. Http Events", - id: "microservices-framework/event-sources/event-types/http-events", - }, - { - type: "doc", - label: "5.3.3. Cron Events", - id: "microservices-framework/event-sources/event-types/cron-events", - }, - { - type: "doc", - label: "5.3.4. Kafka Events", - id: "microservices-framework/event-sources/event-types/kafka-events", - }, - { - type: "doc", - label: "5.3.5. Apollo Graphql Events", - id: "microservices-framework/event-sources/event-types/graphql-events", - }, - ], - }, - { - type: "category", - label: "5.4. Validations", - items: [ - { - type: "doc", - label: "5.4.1. Schema Validation", - id: "microservices-framework/event-sources/validations/schema-validation", - }, - { - type: "doc", - label: "5.4.2. Validation Error", - id: "microservices-framework/event-sources/validations/validation-error", - }, - ], - }, - // { - // type: "doc", - // label: "4.4. Schema Validation", - // id: "microservices-framework/event-sources/schema-validation" - // }, - { - type: "doc", - label: "5.5. Create custom event source", + label: "5.2. Create custom event source", id: "microservices-framework/event-sources/create-custom-event-source", }, { type: "doc", - label: "5.6. Create event source plugin", + label: "5.3. Create event source plugin", id: "microservices-framework/event-sources/create-eventsource-plugin", }, { type: "category", - label: "5.7. Eventsource Plugins", + label: "5.4. Eventsource Plugins", // id: "microservices-framework/event-sources/event-source-plugins", items: [ { type: "doc", - label: "5.7.1. Overview", + label: "5.4.1. 
Overview", id: "microservices-framework/event-sources/event-source-plugins/Overview", }, { type: "doc", - label: "5.7.2. Express - Http Eventsource", + label: "5.4.2. Express - Http Eventsource", id: "microservices-framework/event-sources/event-source-plugins/Express Http Eventsource", }, { type: "doc", - label: "5.7.3. Cron Eventsource", + label: "5.4.3. Cron Eventsource", id: "microservices-framework/event-sources/event-source-plugins/Cron Eventsource", }, { type: "doc", - label: "5.7.4. Kafka Eventsource", + label: "5.4.4. Kafka Eventsource", id: "microservices-framework/event-sources/event-source-plugins/Kafka Eventsource", }, { type: "doc", - label: "5.7.5. GraphQl Eventsource", + label: "5.4.5. GraphQl Eventsource", id: "microservices-framework/event-sources/event-source-plugins/Apollo GraphQl Eventsource", }, { type: "doc", - label: "5.7.6. Fastify Eventsource", + label: "5.4.6. Fastify Eventsource", id: "microservices-framework/event-sources/event-source-plugins/Fastify Eventsource", }, ], @@ -220,36 +162,94 @@ const sidebars = { }, { type: "category", - label: "6. Workflows", + label: "6. Events", items: [ - { + { type: "doc", label: "6.1. Overview", + id: "microservices-framework/event-sources/events-overview", + }, + { + type: "doc", + label: "6.2. Event Schema", + id: "microservices-framework/event-sources/event-schema", + }, + { + type: "doc", + label: "6.3. Event Types", + id: "microservices-framework/event-sources/event-types/overview", + }, + + { + type: "doc", + label: "6.4. Http Events", + id: "microservices-framework/event-sources/event-types/http-events", + }, + { + type: "doc", + label: "6.5. Cron Events", + id: "microservices-framework/event-sources/event-types/cron-events", + }, + { + type: "doc", + label: "6.6. Kafka Events", + id: "microservices-framework/event-sources/event-types/kafka-events", + }, + { + type: "doc", + label: "6.7. Apollo Graphql Events", + id: "microservices-framework/event-sources/event-types/graphql-events", + }, + { + type: "category", + label: "6.8. Validations", + items: [ + { + type: "doc", + label: "6.8.1. Schema Validation", + id: "microservices-framework/event-sources/validations/schema-validation", + }, + { + type: "doc", + label: "6.8.2. Validation Error", + id: "microservices-framework/event-sources/validations/validation-error", + }, + ], + }, + ], + }, + { + type: "category", + label: "7. Workflows", + items: [ + { + type: "doc", + label: "7.1. Overview", id: "microservices-framework/workflows/overview", }, { type: "doc", - label: "6.2. Native language workflows", + label: "7.2. Native language workflows", id: "microservices-framework/workflows/native-language-functions", }, { type: "category", - label: "6.3. Yaml workflows", + label: "7.3. Yaml workflows", // id: "microservices-framework/workflows/yaml-dsl-functions" items: [ { type: "doc", - label: "6.3.1. Overview", + label: "7.3.1. Overview", id: "microservices-framework/workflows/yaml-workflows/overview", }, { type: "doc", - label: "6.3.2. Workflow DSL", + label: "7.3.2. Workflow DSL", id: "microservices-framework/workflows/yaml-workflows/workflow-dsl", }, { type: "doc", - label: "6.3.3. Inbuilt workflows", + label: "7.3.3. Inbuilt workflows", id: "microservices-framework/workflows/yaml-workflows/inbuilt-workflows", }, ], @@ -269,61 +269,61 @@ const sidebars = { }, { type: "category", - label: "7. DataSources", + label: "8. DataSources", items: [ { type: "doc", - label: "7.1. Overview", + label: "8.1. 
Overview", id: "microservices-framework/datasources/overview", }, { type: "category", - label: "7.2. Databases", + label: "8.2. Databases", items: [ { type: "doc", - label: "7.2.1. Overview", + label: "8.2.1. Overview", id: "microservices-framework/databases/Overview", }, { type: "doc", - label: "7.2.2. MongoDB", + label: "8.2.2. MongoDB", id: "microservices-framework/databases/MongoDB", }, { type: "doc", - label: "7.2.3. MySQL", + label: "8.2.3. MySQL", id: "microservices-framework/databases/MySQL", }, { type: "doc", - label: "7.2.4. PostgreSQL", + label: "8.2.4. PostgreSQL", id: "microservices-framework/databases/PostgreSQL", }, { type: "doc", - label: "7.2.5 SQL Server", + label: "8.2.5 SQL Server", id: "microservices-framework/databases/SQLServer", }, { type: "doc", - label: "7.2.6. SqLite", + label: "8.2.6. SqLite", id: "microservices-framework/databases/SQLite", }, { type: "doc", - label: "7.2.7 CokroachDB", + label: "8.2.7 CokroachDB", id: "microservices-framework/databases/CokroachDB", }, { type: "doc", - label: "7.2.8 MariaDB", + label: "8.2.8 MariaDB", id: "microservices-framework/databases/MariaDB", }, { type: "doc", - label: "7.2.9. PlanetScale", + label: "8.2.9. PlanetScale", id: "microservices-framework/databases/PlanetScale", }, ], @@ -331,135 +331,120 @@ const sidebars = { { type: "doc", - label: "7.3. Create Custom DataSource", + label: "8.3. Create Custom DataSource", id: "microservices-framework/datasources/create-custom-datasource", }, { type: "doc", - label: "7.4. Create DataSource Plugin", + label: "8.4. Create DataSource Plugin", id: "microservices-framework/datasources/create-datasource-plugin", }, { type: "category", - label: "7.5. Datasource Plugins", + label: "8.5. Datasource Plugins", items: [ { type: "doc", - label: "7.5.1. Overview", + label: "8.5.1. Overview", id: "microservices-framework/datasources/datasource-plugins/Overview", }, { type: "doc", - label: "7.5.2. Prisma Datasource", + label: "8.5.2. Prisma Datasource", id: "microservices-framework/datasources/datasource-plugins/Prisma Datasource", }, { type: "category", - label: "7.5.3 API Datasource", + label: "8.5.3 API Datasource", items: [ { type: "doc", - label: "7.5.3.1 HTTP (Axios)", + label: "8.5.3.1 HTTP (Axios)", id: "microservices-framework/datasources/datasource-plugins/Axios Datasource", }, ] }, { type: "doc", - label: "7.5.4. AWS Datasource", + label: "8.5.4. AWS Datasource", id: "microservices-framework/datasources/datasource-plugins/AWS Datasource", }, { type: "doc", - label: "7.5.5. Nodemailer Datasource", + label: "8.5.5. Nodemailer Datasource", id: "microservices-framework/datasources/datasource-plugins/Nodemailer Datasource", }, { type: "doc", - label: "7.5.6. Redis Datasource", + label: "8.5.6. Redis Datasource", id: "microservices-framework/datasources/datasource-plugins/Redis Datasource", }, { type: "doc", - label: "7.5.7. Mongoose Datasource", + label: "8.5.7. Mongoose Datasource", id: "microservices-framework/datasources/datasource-plugins/Mongoose Datasource", }, { type: "doc", - label: "7.5.8. Kafka Datasource", + label: "8.5.8. 
Kafka Datasource", id: "microservices-framework/datasources/datasource-plugins/Kafka Datasource", }, { type: "category", - label: "7.5.9 ElasticGraph", + label: "8.5.9 ElasticGraph", items: [ { type: "doc", - label: "7.5.9.1 Elasticgraph as Datasource", + label: "8.5.9.1 Elasticgraph as Datasource", id: "microservices-framework/datasources/datasource-plugins/elasticgraph/elasticgraph", }, { type: "doc", - label: "7.5.9.2 Feature Set of Elasticgraph", + label: "8.5.9.2 Feature Set of Elasticgraph", id: "microservices-framework/datasources/datasource-plugins/elasticgraph/feature-set-of-elasticgraph", }, ], }, { type: "doc", - label: "7.5.10. Memcached Datasource", + label: "8.5.10. Memcached Datasource", id: "microservices-framework/datasources/datasource-plugins/Memcached Datasource", }, ], }, { type: "doc", - label: "7.6. Caching", + label: "8.6. Caching", id: "microservices-framework/datasources/caching", }, ], }, - // { - // type: "category", - // label: "7. Authentication", - // items: [ - // { - // type: "doc", - // label: "7.1. Overview", - // id: "authentication/Overview" - // }, - // { - // type: "doc", - // label: "7.2. Configuration", - // id: "authentication/configuration" - // }, - // ], - // }, + { type: "category", - label: "8. Authentication", + label: "9. Authentication", items: [ { type: "doc", - label: "8.1.Overview", + label: "9.1.Overview", id: "microservices-framework/authentication/overview", }, { type: "doc", - label: "8.2.JWT Authentication", + label: "9.2.JWT Authentication", id: "microservices-framework/authentication/jwt-authentication", }, { type: "doc", - label: "8.3. Custom Authentication", + label: "9.3. Custom Authentication", id: "microservices-framework/authentication/custom-authentication", }, { type: "doc", - label: "8.4. OAuth2 Authentication", + label: "9.4. OAuth2 Authentication", id: "microservices-framework/authentication/oauth2-authentication", }, @@ -467,48 +452,48 @@ const sidebars = { }, { type: "category", - label: "9. Authorization", + label: "10. Authorization", items: [ { type: "doc", - label: "9.1.Overview", + label: "10.1.Overview", id: "microservices-framework/authorization/overview", }, { type: "doc", - label: "9.2.Authz Usecases", + label: "10.2.Authz Usecases", id: "microservices-framework/authorization/authz-usecases", }, ], }, { type: "category", - label: "10. Configs and Mappings", + label: "11. Configs and Mappings", items: [ { type: "doc", - label: "10.1. Config", + label: "11.1. Config", id: "microservices-framework/config-and-mappings/config", }, { type: "doc", - label: "10.2. Mappings", + label: "11.2. Mappings", id: "microservices-framework/config-and-mappings/mappings", }, ], }, { type: "category", - label: "11. Inline scripting", + label: "12. Inline scripting", items: [ { type: "doc", - label: "11.1. Overview", + label: "12.1. Overview", id: "microservices-framework/inline-scripting/overview", }, { type: "doc", - label: "11.2. Script Plugins", + label: "12.2. Script Plugins", id: "microservices-framework/inline-scripting/script-plugins", }, ], @@ -520,49 +505,49 @@ const sidebars = { // }, { type: "category", - label: "12. Telemetry", + label: "13. Telemetry", items: [ { type: "doc", - label: "12.1. Overview", + label: "13.1. Overview", id: "microservices-framework/telemetry/overview", }, { type: "category", - label: "12.2. Configuration", + label: "13.2. Configuration", items: [ { type: "doc", - label: "12.2.1. Generic", + label: "13.2.1. 
Generic", id: "microservices-framework/telemetry/configuration", }, { type: "doc", - label: "12.2.2. Traces", + label: "13.2.2. Traces", id: "microservices-framework/telemetry/tracing", }, { type: "doc", - label: "12.2.3. Metrics", + label: "13.2.3. Metrics", id: "microservices-framework/telemetry/metrics", }, { type: "doc", - label: "12.2.4. Logs", + label: "13.2.4. Logs", id: "microservices-framework/telemetry/logging", }, ] }, { type: "doc", - label: "12.3. Custom traces, logs and metrics (BPM)", + label: "13.3. Custom traces, logs and metrics (BPM)", id: "microservices-framework/telemetry/custom-metrics-logs-traces" }, ], }, { type: "doc", - label: "13. How to Guide", + label: "14. How to Guide", id: "microservices-framework/faqs" }, ], diff --git a/static/setup.bat b/static/setup.bat new file mode 100644 index 0000000..c19a433 --- /dev/null +++ b/static/setup.bat @@ -0,0 +1,34 @@ +@echo off +SETLOCAL + +echo Installing Node.js (version 18 or higher) and Git... + +REM Check for Node.js installation +where node >nul 2>nul +IF ERRORLEVEL 1 ( + echo Node.js is not installed. Installing Node.js... + powershell -Command "Invoke-WebRequest -Uri 'https://nodejs.org/dist/v18.20.4/node-v18.20.4-x64.msi' -OutFile 'nodejs.msi'" + start /wait msiexec.exe /i nodejs.msi /quiet + del nodejs.msi +) ELSE ( + echo Node.js is already installed. +) + +REM Check for Git installation +where git >nul 2>nul +IF ERRORLEVEL 1 ( + echo Git is not installed. Installing Git... + powershell -Command "Invoke-WebRequest -Uri 'https://github.com/git-for-windows/git/releases/download/v2.42.0.windows.1/Git-2.42.0-64-bit.exe' -OutFile 'git-installer.exe'" + start /wait git-installer.exe /SILENT + del git-installer.exe +) ELSE ( + echo Git is already installed. +) + +REM Install Godspeed +echo Installing Godspeed globally using npm... +npm install -g @godspeedsystems/godspeed + +echo Setup complete. +ENDLOCAL +pause diff --git a/static/setup.sh b/static/setup.sh new file mode 100644 index 0000000..c86cd49 --- /dev/null +++ b/static/setup.sh @@ -0,0 +1,67 @@ +#!/bin/bash + +# Function to check the installed version of Node.js +check_node_version() { + local version=$(node -v) + local major_version=$(echo $version | grep -oP '(?<=v)\d+') + + if [ "$major_version" -ge 18 ]; then + echo "Node.js version $version is already installed." + else + echo "Node.js version is lower than 18. Installing the latest Node.js..." + install_node + fi +} + +# Install Node.js (version 18 or higher) +install_node() { + echo "Installing Node.js (version 18 or higher)..." + curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash - + sudo apt-get install -y nodejs +} + +# Install Git if not installed +echo "Checking for Git..." +if ! command -v git &> /dev/null +then + echo "Git is not installed. Installing Git..." + sudo apt-get install -y git +else + echo "Git is already installed." +fi + +# Install Node.js and npm if needed +echo "Checking for Node.js..." +if ! command -v node &> /dev/null +then + install_node +else + check_node_version +fi + +# Verify npm installation +echo "Checking for npm..." +if ! command -v npm &> /dev/null +then + echo "npm is not installed. Installing npm..." + sudo apt-get install -y npm +else + echo "npm is already installed." +fi + +# Install Godspeed globally via npm +echo "Installing Godspeed globally using npm..." +sudo npm install -g @godspeedsystems/godspeed + +# Verify installation +echo "Verifying Godspeed installation..." 
+if command -v godspeed &> /dev/null +then + echo "Godspeed installed successfully." +else + echo "Error: Godspeed installation failed." + exit 1 +fi + +echo "Godspeed setup complete." +