diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml
index f6f50c25abc..22561070bfd 100644
--- a/.github/workflows/quality_check.yml
+++ b/.github/workflows/quality_check.yml
@@ -20,6 +20,7 @@ on:
paths:
- "aws_lambda_powertools/**"
- "tests/**"
+ - "examples/**"
- "pyproject.toml"
- "poetry.lock"
- "mypy.ini"
@@ -30,6 +31,7 @@ on:
paths:
- "aws_lambda_powertools/**"
- "tests/**"
+ - "examples/**"
- "pyproject.toml"
- "poetry.lock"
- "mypy.ini"
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md
index eac4f5bc9a3..4c86c983d31 100644
--- a/docs/utilities/parser.md
+++ b/docs/utilities/parser.md
@@ -2,168 +2,103 @@
title: Parser (Pydantic)
description: Utility
---
+
-This utility provides data parsing and deep validation using [Pydantic](https://pydantic-docs.helpmanual.io/){target="_blank" rel="nofollow"}.
+The Parser utility simplifies data parsing and validation using [Pydantic](https://pydantic-docs.helpmanual.io/){target="_blank" rel="nofollow"}. It allows you to define data models in pure Python classes, parse and validate incoming events, and extract only the data you need.
## Key features
-* Defines data in pure Python classes, then parse, validate and extract only what you want
-* Built-in envelopes to unwrap, extend, and validate popular event sources payloads
-* Enforces type hints at runtime with user-friendly errors
-* Support only Pydantic v2
+- Define data models using Python classes
+- Parse and validate Lambda event payloads
+- Built-in support for common AWS event sources
+- Runtime type checking with user-friendly error messages
+- Compatible with Pydantic v2.x
## Getting started
### Install
-!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}"
-
-You need to bring Pydantic v2.4.0 or later as an external dependency.
+Powertools for AWS Lambda (Python) supports only Pydantic v2, so make sure to install the required Pydantic v2 dependencies before using the Parser.
-Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_.
+```bash
+pip install aws-lambda-powertools[parser]
+```
-### Defining models
+!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}"
-You can define models to parse incoming events by inheriting from `BaseModel`.
+You can also add `aws-lambda-powertools[parser]` as a dependency in your preferred tool, _e.g._, `requirements.txt` or `pyproject.toml`.
-```python title="Defining an Order data model"
-from aws_lambda_powertools.utilities.parser import BaseModel
-from typing import List, Optional
+### Data Model with Parser
-class OrderItem(BaseModel):
- id: int
- quantity: int
- description: str
+You can define models to parse incoming events by inheriting from `BaseModel`, or by using any other type supported through `TypeAdapter`. Pydantic then validates the data, ensuring that all fields conform to the specified types and maintaining data integrity.
-class Order(BaseModel):
- id: int
- description: str
- items: List[OrderItem] # nesting models are supported
- optional_field: Optional[str] = None # this field may or may not be available when parsing
-```
+???+ info
+    The new `TypeAdapter` feature provides a flexible way to perform validation and serialization based on a Python type. Read more in the [Pydantic documentation](https://docs.pydantic.dev/latest/api/type_adapter/){target="_blank" rel="nofollow"}.
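+
+For example, you can parse an event directly into a non-`BaseModel` type such as a discriminated [`Union`](https://docs.pydantic.dev/latest/api/standard_library_types/#union){target="_blank" rel="nofollow"}. A minimal sketch, where the `Cat` and `Dog` models are purely illustrative:
+
+```python
+from typing import Any, Literal, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import Annotated
+
+from aws_lambda_powertools.utilities.parser import event_parser
+
+
+class Cat(BaseModel):
+    animal: Literal["cat"]
+    name: str
+    meow: int
+
+
+class Dog(BaseModel):
+    animal: Literal["dog"]
+    name: str
+    bark: int
+
+
+Animal = Annotated[Union[Cat, Dog], Field(discriminator="animal")]
+
+
+@event_parser(model=Animal)
+def lambda_handler(event: Animal, _: Any) -> str:
+    # non-BaseModel types are validated through a TypeAdapter built by Parser
+    if isinstance(event, Cat):
+        return f"cat: {event.name}"
+    return f"dog: {event.name}"
+```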
-These are simply Python classes that inherit from BaseModel. **Parser** enforces type hints declared in your model at runtime.
+#### Event parser
-### Parsing events
+The `@event_parser` decorator automatically parses the incoming event into the specified Pydantic model `MyEvent`. If the input doesn't match the model's structure or type requirements, it raises a `ValidationError` directly from Pydantic.
-You can parse inbound events using **event_parser** decorator, or the standalone `parse` function. Both are also able to parse either dictionary or JSON string as an input.
+=== "getting_started_with_parser.py"
-#### event_parser decorator
+ ```python hl_lines="3 11"
+ --8<-- "examples/parser/src/getting_started_with_parser.py"
+ ```
-Use the decorator for fail fast scenarios where you want your Lambda function to raise an exception in the event of a malformed payload.
+=== "Sample event"
-`event_parser` decorator will throw a `ValidationError` if your event cannot be parsed according to the model.
+ ```json
+ --8<-- "examples/parser/src/example_event_parser.json"
+ ```
-???+ note
- **This decorator will replace the `event` object with the parsed model if successful**. This means you might be careful when nesting other decorators that expect `event` to be a `dict`.
+#### Parse function
-```python hl_lines="19" title="Parsing and validating upon invocation with event_parser decorator"
-from aws_lambda_powertools.utilities.parser import event_parser, BaseModel
-from aws_lambda_powertools.utilities.typing import LambdaContext
-from typing import List, Optional
+You can use the `parse()` function when you need flexibility with different event formats, custom pre-parsing logic, or finer-grained exception handling.
-import json
+=== "parser_function.py"
-class OrderItem(BaseModel):
- id: int
- quantity: int
- description: str
+ ```python hl_lines="3 15"
+ --8<-- "examples/parser/src/parser_function.py"
+ ```
-class Order(BaseModel):
- id: int
- description: str
- items: List[OrderItem] # nesting models are supported
- optional_field: Optional[str] = None # this field may or may not be available when parsing
+=== "Sample event"
+ ```json
+ --8<-- "examples/parser/src/example_event_parser.json"
+ ```
-@event_parser(model=Order)
-def handler(event: Order, context: LambdaContext):
- print(event.id)
- print(event.description)
- print(event.items)
+#### Key differences between parse and event_parser
- order_items = [item for item in event.items]
- ...
+The `parse()` function offers more flexibility and control:
-payload = {
- "id": 10876546789,
- "description": "My order",
- "items": [
- {
- "id": 1015938732,
- "quantity": 1,
- "description": "item xpto"
- }
- ]
-}
+- It allows parsing different parts of an event using multiple models.
+- You can conditionally handle events before parsing them.
+- It's useful for integrating with complex workflows where a decorator might not be sufficient.
+- It provides more control over the validation process and exception handling (see the sketch after this comparison).
-handler(event=payload, context=LambdaContext())
-handler(event=json.dumps(payload), context=LambdaContext()) # also works if event is a JSON string
-```
+The `@event_parser` decorator is ideal for:
-Alternatively, you can automatically extract the model from the `event` without the need to include the model parameter in the `event_parser` function.
+- Fail-fast scenarios where you want to immediately stop execution if the event payload is invalid.
+- Simplifying your code by automatically parsing and validating the event at the function entry point.
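+
+As an illustration of the first two `parse()` points, here is a minimal sketch that inspects the payload before choosing which model to parse it with; the `OrderCreated` and `OrderCancelled` models are hypothetical:
+
+```python
+from pydantic import BaseModel, ValidationError
+
+from aws_lambda_powertools.utilities.parser import parse
+
+
+class OrderCreated(BaseModel):
+    order_id: int
+    amount: float
+
+
+class OrderCancelled(BaseModel):
+    order_id: int
+    reason: str
+
+
+def lambda_handler(event: dict, context):
+    # conditionally choose a model before parsing -- something the decorator alone can't do
+    model = OrderCancelled if "reason" in event else OrderCreated
+    try:
+        parsed = parse(model=model, event=event)
+    except ValidationError as error:
+        return {"statusCode": 400, "body": f"Validation error: {error}"}
+    return {"statusCode": 200, "body": parsed.model_dump_json()}
+```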
-```python hl_lines="23 24"
- --8<-- "examples/parser/src/using_the_model_from_event.py"
-```
+### Built-in models
-#### parse function
-
-Use this standalone function when you want more control over the data validation process, for example returning a 400 error for malformed payloads.
-
-```python hl_lines="21 31" title="Using standalone parse function for more flexibility"
-from aws_lambda_powertools.utilities.parser import parse, BaseModel, ValidationError
-from typing import List, Optional
-
-class OrderItem(BaseModel):
- id: int
- quantity: int
- description: str
-
-class Order(BaseModel):
- id: int
- description: str
- items: List[OrderItem] # nesting models are supported
- optional_field: Optional[str] = None # this field may or may not be available when parsing
-
-
-payload = {
- "id": 10876546789,
- "description": "My order",
- "items": [
- {
- # this will cause a validation error
- "id": [1015938732],
- "quantity": 1,
- "description": "item xpto"
- }
- ]
-}
-
-def my_function():
- try:
- parsed_payload: Order = parse(event=payload, model=Order)
- # payload dict is now parsed into our model
- return parsed_payload.items
- except ValidationError:
- return {
- "status_code": 400,
- "message": "Invalid order"
- }
-```
+You can use pre-built models to work with events from AWS services, so you don’t need to create them yourself. We’ve already done that for you!
-#### Primitive data model parsing
+=== "sqs_model_event.py"
-The parser allows you parse events into primitive data types, such as `dict` or classes that don't inherit from `BaseModel`. The following example shows you how to parse a [`Union`](https://docs.pydantic.dev/latest/api/standard_library_types/#union):
+ ```python hl_lines="2 7"
+ --8<-- "examples/parser/src/sqs_model_event.py"
+ ```
-```python
---8<-- "examples/parser/src/multiple_model_parsing.py"
-```
+=== "Sample event"
-### Built-in models
+ ```json
+ --8<-- "examples/parser/src/sqs_model_event.json"
+ ```
-Parser comes with the following built-in models:
+The example above uses `SqsModel`. Other built-in models can be found below.
| Model name | Description |
| ------------------------------------------- | ------------------------------------------------------------------------------------- |
@@ -204,156 +139,61 @@ You can extend them to include your own models, and yet have all other known fie
???+ tip
For Mypy users, we only allow type override for fields where payload is injected e.g. `detail`, `body`, etc.
-```python hl_lines="16-17 28 41" title="Extending EventBridge model as an example"
-from aws_lambda_powertools.utilities.parser import parse, BaseModel
-from aws_lambda_powertools.utilities.parser.models import EventBridgeModel
-
-from typing import List, Optional
-
-class OrderItem(BaseModel):
- id: int
- quantity: int
- description: str
-
-class Order(BaseModel):
- id: int
- description: str
- items: List[OrderItem]
-
-class OrderEventModel(EventBridgeModel):
- detail: Order
-
-payload = {
- "version": "0",
- "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718",
- "detail-type": "OrderPurchased",
- "source": "OrderService",
- "account": "111122223333",
- "time": "2020-10-22T18:43:48Z",
- "region": "us-west-1",
- "resources": ["some_additional"],
- "detail": {
- "id": 10876546789,
- "description": "My order",
- "items": [
- {
- "id": 1015938732,
- "quantity": 1,
- "description": "item xpto"
- }
- ]
- }
-}
-
-ret = parse(model=OrderEventModel, event=payload)
-
-assert ret.source == "OrderService"
-assert ret.detail.description == "My order"
-assert ret.detail_type == "OrderPurchased" # we rename it to snake_case since detail-type is an invalid name
-
-for order_item in ret.detail.items:
- ...
-```
-
-**What's going on here, you might ask**:
-
-1. We imported our built-in model `EventBridgeModel` from the parser utility
-2. Defined how our `Order` should look like
-3. Defined how part of our EventBridge event should look like by overriding `detail` key within our `OrderEventModel`
-4. Parser parsed the original event against `OrderEventModel`
+**Example: custom data model with Amazon EventBridge**
+
+Extend the built-in `EventBridgeModel` to validate and extract relevant information from the incoming event. This is useful when you need to handle events with a specific structure, or when you want to ensure that the event data conforms to certain rules.
-???+ tip
- When extending a `string` field containing JSON, you need to wrap the field
- with [Pydantic's Json Type](https://pydantic-docs.helpmanual.io/usage/types/#json-type){target="_blank" rel="nofollow"}:
+=== "Custom data model"
- ```python hl_lines="14 18-19"
- --8<-- "examples/parser/src/extending_built_in_models_with_json_mypy.py"
+ ```python hl_lines="4 8 17"
+ --8<-- "examples/parser/src/custom_data_model_with_eventbridge.py"
```
- Alternatively, you could use a [Pydantic validator](https://pydantic-docs.helpmanual.io/usage/validators/){target="_blank" rel="nofollow"} to transform the JSON string into a dict before the mapping:
+=== "Sample event"
- ```python hl_lines="18-20 24-25"
- --8<-- "examples/parser/src/extending_built_in_models_with_json_validator.py"
+ ```json
+ --8<-- "examples/parser/src/data_model_eventbridge.json"
```
-### Envelopes
-
-When trying to parse your payloads wrapped in a known structure, you might encounter the following situations:
+## Advanced
-* Your actual payload is wrapped around a known structure, for example Lambda Event Sources like EventBridge
-* You're only interested in a portion of the payload, for example parsing the `detail` of custom events in EventBridge, or `body` of SQS records
-
-You can either solve these situations by creating a model of these known structures, parsing them, then extracting and parsing a key where your payload is.
+### Envelopes
-This can become difficult quite quickly. Parser makes this problem easier through a feature named `Envelope`.
+You can use **Envelopes** to extract specific portions of complex, nested JSON structures. This is useful when your actual payload is wrapped in a known structure, for example Lambda Event Sources like **EventBridge**.
Envelopes can be used via `envelope` parameter available in both `parse` function and `event_parser` decorator.
-Here's an example of parsing a model found in an event coming from EventBridge, where all you want is what's inside the `detail` key.
-
-```python hl_lines="18-22 25 31" title="Parsing payload in a given key only using envelope feature"
-from aws_lambda_powertools.utilities.parser import event_parser, parse, BaseModel, envelopes
-from aws_lambda_powertools.utilities.typing import LambdaContext
-
-class UserModel(BaseModel):
- username: str
- password1: str
- password2: str
-
-payload = {
- "version": "0",
- "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718",
- "detail-type": "CustomerSignedUp",
- "source": "CustomerService",
- "account": "111122223333",
- "time": "2020-10-22T18:43:48Z",
- "region": "us-west-1",
- "resources": ["some_additional_"],
- "detail": {
- "username": "universe",
- "password1": "myp@ssword",
- "password2": "repeat password"
- }
-}
-
-ret = parse(model=UserModel, envelope=envelopes.EventBridgeEnvelope, event=payload)
-
-# Parsed model only contains our actual model, not the entire EventBridge + Payload parsed
-assert ret.password1 == ret.password2
-
-# Same behaviour but using our decorator
-@event_parser(model=UserModel, envelope=envelopes.EventBridgeEnvelope)
-def handler(event: UserModel, context: LambdaContext):
- assert event.password1 == event.password2
-```
+=== "Envelopes using event parser decorator"
-**What's going on here, you might ask**:
+ ```python hl_lines="3 7-10 13"
+ --8<-- "examples/parser/src/envelope_with_event_parser.py"
+ ```
+
+=== "Sample event"
-1. We imported built-in `envelopes` from the parser utility
-2. Used `envelopes.EventBridgeEnvelope` as the envelope for our `UserModel` model
-3. Parser parsed the original event against the EventBridge model
-4. Parser then parsed the `detail` key using `UserModel`
+ ```json hl_lines="12-16"
+ --8<-- "examples/parser/src/envelope_payload.json"
+ ```
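+
+The same envelope also works with the standalone `parse` function. A minimal sketch reusing the `UserModel` from the example above:
+
+```python
+from pydantic import BaseModel
+
+from aws_lambda_powertools.utilities.parser import envelopes, parse
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class UserModel(BaseModel):
+    username: str
+    parentid_1: str
+    parentid_2: str
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+    # returns only the parsed `detail` payload, not the entire EventBridge event
+    user = parse(model=UserModel, envelope=envelopes.EventBridgeEnvelope, event=event)
+    return {"statusCode": 200, "body": f"User {user.username} registered successfully"}
+```
+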
#### Built-in envelopes
-Parser comes with the following built-in envelopes, where `Model` in the return section is your given model.
-
-| Envelope name | Behaviour | Return |
-| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- |
-| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` |
-| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` |
-| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` |
-| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` |
-| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` |
-| **KinesisFirehoseEnvelope** | 1. Parses data using `KinesisFirehoseModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` |
-| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` |
-| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` |
-| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` |
-| **ApiGatewayV2Envelope** | 1. Parses data using `APIGatewayProxyEventV2Model`.
2. Parses `body` key using your model and returns it. | `Model` |
-| **LambdaFunctionUrlEnvelope** | 1. Parses data using `LambdaFunctionUrlModel`.
2. Parses `body` key using your model and returns it. | `Model` |
-| **KafkaEnvelope** | 1. Parses data using `KafkaRecordModel`.
2. Parses `value` key using your model and returns it. | `Model` |
-| **VpcLatticeEnvelope** | 1. Parses data using `VpcLatticeModel`.
2. Parses `value` key using your model and returns it. | `Model` |
-| **BedrockAgentEnvelope** | 1. Parses data using `BedrockAgentEventModel`.
2. Parses `inputText` key using your model and returns it. | `Model` |
+You can use pre-built envelopes provided by the Parser to extract and parse specific parts of complex event structures.
+
+| Envelope name | Behaviour | Return |
+| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- |
+| **DynamoDBStreamEnvelope**    | 1. Parses data using `DynamoDBStreamModel`. 2. Parses records in `NewImage` and `OldImage` keys using your model. 3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` |
+| **EventBridgeEnvelope**       | 1. Parses data using `EventBridgeModel`. 2. Parses `detail` key using your model and returns it.                                                                                                | `Model`                            |
+| **SqsEnvelope**               | 1. Parses data using `SqsModel`. 2. Parses records in `body` key using your model and returns them in a list.                                                                                   | `List[Model]`                      |
+| **CloudWatchLogsEnvelope**    | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it. 2. Parses records in `message` key using your model and returns them in a list.                         | `List[Model]`                      |
+| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it. 2. Parses records in `Records` key using your model and returns them in a list.                                     | `List[Model]`                      |
+| **KinesisFirehoseEnvelope**   | 1. Parses data using `KinesisFirehoseModel` which will base64 decode it. 2. Parses records in `Records` key using your model and returns them in a list.                                       | `List[Model]`                      |
+| **SnsEnvelope**               | 1. Parses data using `SnsModel`. 2. Parses records in `body` key using your model and returns them in a list.                                                                                   | `List[Model]`                      |
+| **SnsSqsEnvelope**            | 1. Parses data using `SqsModel`. 2. Parses SNS records in `body` key using `SnsNotificationModel`. 3. Parses data in `Message` key using your model and returns them in a list.                | `List[Model]`                      |
+| **ApiGatewayEnvelope**        | 1. Parses data using `APIGatewayProxyEventModel`. 2. Parses `body` key using your model and returns it.                                                                                        | `Model`                            |
+| **ApiGatewayV2Envelope**      | 1. Parses data using `APIGatewayProxyEventV2Model`. 2. Parses `body` key using your model and returns it.                                                                                      | `Model`                            |
+| **LambdaFunctionUrlEnvelope** | 1. Parses data using `LambdaFunctionUrlModel`. 2. Parses `body` key using your model and returns it.                                                                                           | `Model`                            |
+| **KafkaEnvelope**             | 1. Parses data using `KafkaRecordModel`. 2. Parses `value` key using your model and returns it.                                                                                                | `Model`                            |
+| **VpcLatticeEnvelope**        | 1. Parses data using `VpcLatticeModel`. 2. Parses `value` key using your model and returns it.                                                                                                 | `Model`                            |
+| **BedrockAgentEnvelope**      | 1. Parses data using `BedrockAgentEventModel`. 2. Parses `inputText` key using your model and returns it.                                                                                      | `Model`                            |
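+
+Envelopes that iterate over records hand a list of parsed models to your function. A minimal sketch with `SqsEnvelope`, assuming each SQS record `body` carries the JSON of a hypothetical `Order` model:
+
+```python
+from typing import List
+
+from pydantic import BaseModel
+
+from aws_lambda_powertools.utilities.parser import envelopes, event_parser
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class Order(BaseModel):
+    order_id: int
+    amount: float
+
+
+@event_parser(model=Order, envelope=envelopes.SqsEnvelope)
+def lambda_handler(event: List[Order], context: LambdaContext):
+    # SqsEnvelope parses every record's `body` with Order and returns List[Order]
+    return {"statusCode": 200, "body": f"Processed {len(event)} orders"}
+```
+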
#### Bringing your own envelope
@@ -361,205 +201,98 @@ You can create your own Envelope model and logic by inheriting from `BaseEnvelop
Here's a snippet of how the EventBridge envelope we demonstrated previously is implemented.
-=== "EventBridge Model"
-
- ```python
- from datetime import datetime
- from typing import Any, Dict, List
+=== "Bring your own envelope with Event Bridge"
- from aws_lambda_powertools.utilities.parser import BaseModel, Field
-
-
- class EventBridgeModel(BaseModel):
- version: str
- id: str # noqa: A003,VNE003
- source: str
- account: str
- time: datetime
- region: str
- resources: List[str]
- detail_type: str = Field(None, alias="detail-type")
- detail: Dict[str, Any]
+ ```python hl_lines="6 13-19"
+ --8<-- "examples/parser/src/bring_your_own_envelope.py"
```
-=== "EventBridge Envelope"
-
- ```python hl_lines="8 10 25 26"
- from aws_lambda_powertools.utilities.parser import BaseEnvelope, models
- from aws_lambda_powertools.utilities.parser.models import EventBridgeModel
-
- from typing import Any, Dict, Optional, TypeVar
+=== "Sample event"
- Model = TypeVar("Model", bound=BaseModel)
-
- class EventBridgeEnvelope(BaseEnvelope):
-
- def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Optional[Model]:
- """Parses data found with model provided
-
- Parameters
- ----------
- data : Dict
- Lambda event to be parsed
- model : Model
- Data model provided to parse after extracting data using envelope
-
- Returns
- -------
- Any
- Parsed detail payload with model provided
- """
- parsed_envelope = EventBridgeModel.model_validate(data)
- return self._parse(data=parsed_envelope.detail, model=model)
+ ```json
+ --8<-- "examples/parser/src/bring_your_own_envelope.json"
```
**What's going on here, you might ask**:
-1. We defined an envelope named `EventBridgeEnvelope` inheriting from `BaseEnvelope`
-2. Implemented the `parse` abstract method taking `data` and `model` as parameters
-3. Then, we parsed the incoming data with our envelope to confirm it matches EventBridge's structure defined in `EventBridgeModel`
-4. Lastly, we call `_parse` from `BaseEnvelope` to parse the data in our envelope (.detail) using the customer model
+- **EventBridgeEnvelope**: extracts the `detail` field from EventBridge events.
+- **OrderDetail model**: defines and validates the structure of the order data.
+- **@event_parser**: automates parsing and validation of incoming events using the specified model and envelope.
### Data model validation
???+ warning
This is radically different from the **Validator utility** which validates events against JSON Schema.
-You can use parser's validator for deep inspection of object values and complex relationships.
+You can use Pydantic's validator for deep inspection of object values and complex relationships.
There are two types of class method decorators you can use:
-* **`validator`** - Useful to quickly validate an individual field and its value
-* **`root_validator`** - Useful to validate the entire model's data
+- **`field_validator`** - Useful to quickly validate an individual field and its value
+- **`model_validator`** - Useful to validate the entire model's data
Keep the following in mind regardless of which decorator you end up using it:
-* You must raise either `ValueError`, `TypeError`, or `AssertionError` when value is not compliant
-* You must return the value(s) itself if compliant
+- You must raise either `ValueError`, `TypeError`, or `AssertionError` when value is not compliant
+- You must return the value(s) itself if compliant
-#### validating fields
+#### Field validator
-Quick validation to verify whether the field `message` has the value of `hello world`.
+Quick validation using the `field_validator` decorator to verify whether the field `message` has the value of `hello world`.
-```python hl_lines="6" title="Data field validation with validator"
-from aws_lambda_powertools.utilities.parser import parse, BaseModel, validator
-
-class HelloWorldModel(BaseModel):
- message: str
-
- @validator('message')
- def is_hello_world(cls, v):
- if v != "hello world":
- raise ValueError("Message must be hello world!")
- return v
-
-parse(model=HelloWorldModel, event={"message": "hello universe"})
+```python title="field_validator.py" hl_lines="1 10-14"
+--8<-- "examples/parser/src/field_validator.py"
```
-If you run as-is, you should expect the following error with the message we provided in our exception:
+If you run this with a test event `{"message": "hello universe"}`, you should expect the following error with the message we provided in our exception:
-```python title="Sample validation error message"
-message
+```python
Message must be hello world! (type=value_error)
```
-Alternatively, you can pass `'*'` as an argument for the decorator so that you can validate every value available.
+#### Model validator
-```python hl_lines="7" title="Validating all data fields with custom logic"
-from aws_lambda_powertools.utilities.parser import parse, BaseModel, validator
+`model_validator` can help when you have a complex validation mechanism, for example checking whether data has been omitted or comparing field values.
-class HelloWorldModel(BaseModel):
- message: str
- sender: str
-
- @validator('*')
- def has_whitespace(cls, v):
- if ' ' not in v:
- raise ValueError("Must have whitespace...")
-
- return v
-
-parse(model=HelloWorldModel, event={"message": "hello universe", "sender": "universe"})
+```python title="model_validator.py" hl_lines="1 12-17"
+--8<-- "examples/parser/src/model_validator.py"
```
-#### validating entire model
-
-`root_validator` can help when you have a complex validation mechanism. For example finding whether data has been omitted, comparing field values, etc.
-
-```python title="Comparing and validating multiple fields at once with root_validator"
-from aws_lambda_powertools.utilities.parser import parse, BaseModel, root_validator
-
-class UserModel(BaseModel):
- username: str
- password1: str
- password2: str
-
- @root_validator
- def check_passwords_match(cls, values):
- pw1, pw2 = values.get('password1'), values.get('password2')
- if pw1 is not None and pw2 is not None and pw1 != pw2:
- raise ValueError('passwords do not match')
- return values
-
-payload = {
- "username": "universe",
- "password1": "myp@ssword",
- "password2": "repeat password"
-}
-
-parse(model=UserModel, event=payload)
-```
+1. The keyword argument `mode='after'` will cause the validator to be called after all field-level validation and parsing has been completed.
???+ info
- You can read more about validating list items, reusing validators, validating raw inputs, and a lot more in Pydantic's documentation.
+    You can read more about validating list items, reusing validators, validating raw inputs, and a lot more in [Pydantic's documentation](https://pydantic-docs.helpmanual.io/usage/validators/){target="_blank" rel="nofollow"}.
-### Advanced use cases
+#### String fields that contain JSON data
-???+ tip "Tip: Looking to auto-generate models from JSON, YAML, JSON Schemas, OpenApi, etc?"
- Use Koudai Aono's [data model code generation tool for Pydantic](https://github.com/koxudaxi/datamodel-code-generator){target="_blank" rel="nofollow"}
+Wrap these fields with [Pydantic's Json Type](https://pydantic-docs.helpmanual.io/usage/types/#json-type){target="_blank" rel="nofollow"}. This approach allows Pydantic to properly parse and validate the JSON content, ensuring type safety and data integrity.
-There are number of advanced use cases well documented in Pydantic's doc such as creating [immutable models](https://pydantic-docs.helpmanual.io/usage/models/#faux-immutability){target="_blank" rel="nofollow"}, [declaring fields with dynamic values](https://pydantic-docs.helpmanual.io/usage/models/#field-with-dynamic-default-value){target="_blank" rel="nofollow"}.
+=== "Validate string fields containing JSON data"
-???+ tip "Pydantic helper functions"
- Pydantic also offers [functions](https://pydantic-docs.helpmanual.io/usage/models/#helper-functions){target="_blank" rel="nofollow"} to parse models from files, dicts, string, etc.
+ ```python hl_lines="5 24"
+ --8<-- "examples/parser/src/string_fields_contain_json.py"
+ ```
-Two possible unknown use cases are Models and exception' serialization. Models have methods to [export them](https://pydantic-docs.helpmanual.io/usage/exporting_models/){target="_blank" rel="nofollow"} as `dict`, `JSON`, `JSON Schema`, and Validation exceptions can be exported as JSON.
+=== "Sample event"
-```python hl_lines="21 28-31" title="Converting data models in various formats"
-from aws_lambda_powertools.utilities import Logger
-from aws_lambda_powertools.utilities.parser import parse, BaseModel, ValidationError, validator
+ ```json
+ --8<-- "examples/parser/src/json_data_string.json"
+ ```
-logger = Logger(service="user")
+### Serialization
-class UserModel(BaseModel):
- username: str
- password1: str
- password2: str
+Models in Pydantic offer more than direct attribute access. They can be transformed, serialized, and exported in various formats.
-payload = {
- "username": "universe",
- "password1": "myp@ssword",
- "password2": "repeat password"
-}
+Pydantic's definition of _serialization_ is broader than usual. It includes converting structured objects to simpler Python types, not just data to strings or bytes. This reflects the close relationship between these processes in Pydantic.
-def my_function():
- try:
- return parse(model=UserModel, event=payload)
- except ValidationError as e:
- logger.exception(e.json())
- return {
- "status_code": 400,
- "message": "Invalid username"
- }
+Read more in the [Pydantic documentation on serialization](https://docs.pydantic.dev/latest/concepts/serialization/#model_copy){target="_blank" rel="nofollow"}.
-User: UserModel = my_function()
-user_dict = User.dict()
-user_json = User.json()
-user_json_schema_as_dict = User.schema()
-user_json_schema_as_json = User.schema_json(indent=2)
+```python title="serialization_parser.py" hl_lines="36-37"
+--8<-- "examples/parser/src/serialization_parser.py"
```
-These can be quite useful when manipulating models that later need to be serialized as inputs for services like DynamoDB, EventBridge, etc.
+???+ info
+    There are a number of advanced use cases well documented in Pydantic's docs, such as creating [immutable models](https://pydantic-docs.helpmanual.io/usage/models/#faux-immutability){target="_blank" rel="nofollow"} or [declaring fields with dynamic values](https://pydantic-docs.helpmanual.io/usage/models/#field-with-dynamic-default-value){target="_blank" rel="nofollow"}.
## FAQ
@@ -571,10 +304,10 @@ Parser is best suited for those looking for a trade-off between defining their m
**How do I import X from Pydantic?**
-We export most common classes, exceptions, and utilities from Pydantic as part of parser e.g. `from aws_lambda_powertools.utilities.parser import BaseModel`.
-
-If what you're trying to use isn't available as part of the high level import system, use the following escape hatch mechanism:
+We recommend importing directly from Pydantic to access all features and stay up-to-date with the latest Pydantic updates. For example:
-```python title="Pydantic import escape hatch"
-from aws_lambda_powertools.utilities.parser.pydantic import
+```python
+from pydantic import BaseModel, Field, ValidationError
```
+
+While we export some common Pydantic classes and utilities through the parser for convenience (e.g., `from aws_lambda_powertools.utilities.parser import BaseModel`), importing directly from Pydantic ensures you have access to all features and the most recent updates.
diff --git a/examples/parser/src/bring_your_own_envelope.json b/examples/parser/src/bring_your_own_envelope.json
new file mode 100644
index 00000000000..f905c7b5b16
--- /dev/null
+++ b/examples/parser/src/bring_your_own_envelope.json
@@ -0,0 +1,15 @@
+{
+ "version": "0",
+ "id": "12345678-1234-1234-1234-123456789012",
+ "detail-type": "Order Placed",
+ "source": "com.mycompany.orders",
+ "account": "123456789012",
+ "time": "2023-05-03T12:00:00Z",
+ "region": "us-west-2",
+ "resources": [],
+ "detail": {
+ "order_id": "ORD-12345",
+ "amount": 99.99,
+ "customer_id": "CUST-6789"
+ }
+}
\ No newline at end of file
diff --git a/examples/parser/src/bring_your_own_envelope.py b/examples/parser/src/bring_your_own_envelope.py
new file mode 100644
index 00000000000..1fb5dea0045
--- /dev/null
+++ b/examples/parser/src/bring_your_own_envelope.py
@@ -0,0 +1,51 @@
+import json
+from typing import Any, Dict, Optional, Type, TypeVar, Union
+
+from pydantic import BaseModel
+
+from aws_lambda_powertools.utilities.parser import BaseEnvelope, event_parser
+from aws_lambda_powertools.utilities.parser.models import EventBridgeModel
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+Model = TypeVar("Model", bound=BaseModel)
+
+
+class EventBridgeEnvelope(BaseEnvelope):
+ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Optional[Model]:
+ if data is None:
+ return None
+
+ parsed_envelope = EventBridgeModel.model_validate(data)
+ return self._parse(data=parsed_envelope.detail, model=model)
+
+
+class OrderDetail(BaseModel):
+ order_id: str
+ amount: float
+ customer_id: str
+
+
+@event_parser(model=OrderDetail, envelope=EventBridgeEnvelope)
+def lambda_handler(event: OrderDetail, context: LambdaContext):
+ try:
+ # Process the order
+ print(f"Processing order {event.order_id} for customer {event.customer_id}")
+ print(f"Order amount: ${event.amount:.2f}")
+
+ # Your business logic here
+ # For example, you might save the order to a database or trigger a payment process
+
+ return {
+ "statusCode": 200,
+ "body": json.dumps(
+ {
+ "message": f"Order {event.order_id} processed successfully",
+ "order_id": event.order_id,
+ "amount": event.amount,
+ "customer_id": event.customer_id,
+ },
+ ),
+ }
+ except Exception as e:
+ print(f"Error processing order: {str(e)}")
+ return {"statusCode": 500, "body": json.dumps({"error": "Internal server error"})}
diff --git a/examples/parser/src/custom_data_model_with_eventbridge.py b/examples/parser/src/custom_data_model_with_eventbridge.py
new file mode 100644
index 00000000000..b9d0c4593b0
--- /dev/null
+++ b/examples/parser/src/custom_data_model_with_eventbridge.py
@@ -0,0 +1,21 @@
+from pydantic import Field, ValidationError
+
+from aws_lambda_powertools.utilities.parser import parse
+from aws_lambda_powertools.utilities.parser.models import EventBridgeModel
+
+
+# Define a custom EventBridge model by extending the built-in EventBridgeModel
+class MyCustomEventBridgeModel(EventBridgeModel):
+ detail_type: str = Field(alias="detail-type")
+ source: str
+ detail: dict
+
+
+def lambda_handler(event: dict, context):
+ try:
+ # Manually parse the incoming event into the custom model
+ parsed_event: MyCustomEventBridgeModel = parse(model=MyCustomEventBridgeModel, event=event)
+
+ return {"statusCode": 200, "body": f"Event from {parsed_event.source}, type: {parsed_event.detail_type}"}
+ except ValidationError as e:
+ return {"statusCode": 400, "body": f"Validation error: {str(e)}"}
diff --git a/examples/parser/src/data_model_eventbridge.json b/examples/parser/src/data_model_eventbridge.json
new file mode 100644
index 00000000000..2e05f0f8fa7
--- /dev/null
+++ b/examples/parser/src/data_model_eventbridge.json
@@ -0,0 +1,14 @@
+{
+ "version": "0",
+ "id": "abcd-1234-efgh-5678",
+ "detail-type": "order.created",
+ "source": "my.order.service",
+ "account": "123456789012",
+ "time": "2023-09-10T12:00:00Z",
+ "region": "us-west-2",
+ "resources": [],
+ "detail": {
+ "orderId": "O-12345",
+ "amount": 100.0
+ }
+}
\ No newline at end of file
diff --git a/examples/parser/src/envelope_payload.json b/examples/parser/src/envelope_payload.json
new file mode 100644
index 00000000000..68e1a454868
--- /dev/null
+++ b/examples/parser/src/envelope_payload.json
@@ -0,0 +1,17 @@
+{
+ "version": "0",
+ "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718",
+ "detail-type": "CustomerSignedUp",
+ "source": "CustomerService",
+ "account": "111122223333",
+ "time": "2020-10-22T18:43:48Z",
+ "region": "us-west-1",
+ "resources": [
+ "some_additional_"
+ ],
+ "detail": {
+ "username": "universe",
+ "parentid_1": "12345",
+ "parentid_2": "6789"
+ }
+}
\ No newline at end of file
diff --git a/examples/parser/src/envelope_with_event_parser.py b/examples/parser/src/envelope_with_event_parser.py
new file mode 100644
index 00000000000..ba222ff1190
--- /dev/null
+++ b/examples/parser/src/envelope_with_event_parser.py
@@ -0,0 +1,20 @@
+from pydantic import BaseModel
+
+from aws_lambda_powertools.utilities.parser import envelopes, event_parser
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class UserModel(BaseModel):
+ username: str
+ parentid_1: str
+ parentid_2: str
+
+
+@event_parser(model=UserModel, envelope=envelopes.EventBridgeEnvelope)
+def lambda_handler(event: UserModel, context: LambdaContext):
+ if event.parentid_1 != event.parentid_2:
+ return {"statusCode": 400, "body": "Parent ids do not match"}
+
+ # If parentids match, proceed with user registration
+
+ return {"statusCode": 200, "body": f"User {event.username} registered successfully"}
diff --git a/examples/parser/src/example_event_parser.json b/examples/parser/src/example_event_parser.json
new file mode 100644
index 00000000000..1dcc13a2e3e
--- /dev/null
+++ b/examples/parser/src/example_event_parser.json
@@ -0,0 +1,4 @@
+{
+ "id": "12345",
+ "name": "Jane Doe"
+}
\ No newline at end of file
diff --git a/examples/parser/src/extending_built_in_models_with_json_mypy.py b/examples/parser/src/extending_built_in_models_with_json_mypy.py
deleted file mode 100644
index 813f757ad79..00000000000
--- a/examples/parser/src/extending_built_in_models_with_json_mypy.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from pydantic import BaseModel, Json
-
-from aws_lambda_powertools.utilities.parser import event_parser
-from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
-from aws_lambda_powertools.utilities.typing import LambdaContext
-
-
-class CancelOrder(BaseModel):
- order_id: int
- reason: str
-
-
-class CancelOrderModel(APIGatewayProxyEventV2Model):
- body: Json[CancelOrder] # type: ignore[assignment]
-
-
-@event_parser(model=CancelOrderModel)
-def handler(event: CancelOrderModel, context: LambdaContext):
- cancel_order: CancelOrder = event.body
-
- assert cancel_order.order_id is not None
diff --git a/examples/parser/src/extending_built_in_models_with_json_validator.py b/examples/parser/src/extending_built_in_models_with_json_validator.py
deleted file mode 100644
index acd4f3fc825..00000000000
--- a/examples/parser/src/extending_built_in_models_with_json_validator.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import json
-
-from pydantic import BaseModel, validator
-
-from aws_lambda_powertools.utilities.parser import event_parser
-from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
-from aws_lambda_powertools.utilities.typing import LambdaContext
-
-
-class CancelOrder(BaseModel):
- order_id: int
- reason: str
-
-
-class CancelOrderModel(APIGatewayProxyEventV2Model):
- body: CancelOrder # type: ignore[assignment]
-
- @validator("body", pre=True)
- def transform_body_to_dict(cls, value: str):
- return json.loads(value)
-
-
-@event_parser(model=CancelOrderModel)
-def handler(event: CancelOrderModel, context: LambdaContext):
- cancel_order: CancelOrder = event.body
-
- assert cancel_order.order_id is not None
diff --git a/examples/parser/src/field_validator.py b/examples/parser/src/field_validator.py
new file mode 100644
index 00000000000..5af46bb4f41
--- /dev/null
+++ b/examples/parser/src/field_validator.py
@@ -0,0 +1,22 @@
+from pydantic import BaseModel, field_validator
+
+from aws_lambda_powertools.utilities.parser import parse
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class HelloWorldModel(BaseModel):
+ message: str
+
+ @field_validator("message")
+ def is_hello_world(cls, v):
+ if v != "hello world":
+ raise ValueError("Message must be hello world!")
+ return v
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+ try:
+ parsed_event = parse(model=HelloWorldModel, event=event)
+ return {"statusCode": 200, "body": f"Received message: {parsed_event.message}"}
+ except ValueError as e:
+ return {"statusCode": 400, "body": str(e)}
diff --git a/examples/parser/src/field_validator_all_values.py b/examples/parser/src/field_validator_all_values.py
new file mode 100644
index 00000000000..9a89b5495c4
--- /dev/null
+++ b/examples/parser/src/field_validator_all_values.py
@@ -0,0 +1,27 @@
+from aws_lambda_powertools.utilities.parser import BaseModel, field_validator, parse
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class HelloWorldModel(BaseModel):
+ message: str
+ sender: str
+
+ @field_validator("*")
+ def has_whitespace(cls, v):
+ if " " not in v:
+ raise ValueError("Must have whitespace...")
+ return v
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+ try:
+ parsed_event = parse(model=HelloWorldModel, event=event)
+ return {
+ "statusCode": 200,
+ "body": f"Received message: {parsed_event.message}",
+ }
+ except ValueError as e:
+ return {
+ "statusCode": 400,
+ "body": str(e),
+ }
diff --git a/examples/parser/src/getting_started_with_parser.py b/examples/parser/src/getting_started_with_parser.py
new file mode 100644
index 00000000000..64625f8c87a
--- /dev/null
+++ b/examples/parser/src/getting_started_with_parser.py
@@ -0,0 +1,14 @@
+from pydantic import BaseModel
+
+from aws_lambda_powertools.utilities.parser import event_parser
+
+
+class MyEvent(BaseModel):
+ id: int
+ name: str
+
+
+@event_parser(model=MyEvent)
+def lambda_handler(event: MyEvent, context):
+ # if your model is valid, you can return
+ return {"statusCode": 200, "body": f"Hello {event.name}, your ID is {event.id}"}
diff --git a/examples/parser/src/json_data_string.json b/examples/parser/src/json_data_string.json
new file mode 100644
index 00000000000..9cd4ba447be
--- /dev/null
+++ b/examples/parser/src/json_data_string.json
@@ -0,0 +1,3 @@
+{
+ "body": "{\"order_id\": 12345, \"reason\": \"Changed my mind\"}"
+}
\ No newline at end of file
diff --git a/examples/parser/src/model_validator.py b/examples/parser/src/model_validator.py
new file mode 100644
index 00000000000..8f9bd2d2d77
--- /dev/null
+++ b/examples/parser/src/model_validator.py
@@ -0,0 +1,31 @@
+from pydantic import BaseModel, model_validator
+
+from aws_lambda_powertools.utilities.parser import parse
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+class UserModel(BaseModel):
+ username: str
+ parentid_1: str
+ parentid_2: str
+
+ @model_validator(mode="after") # (1)!
+    def check_parents_match(self):
+        # with mode="after", Pydantic passes the fully validated model instance
+        if self.parentid_1 != self.parentid_2:
+            raise ValueError("Parent ids do not match")
+        return self
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+ try:
+ parsed_event = parse(model=UserModel, event=event)
+ return {
+ "statusCode": 200,
+ "body": f"Received parent id from: {parsed_event.username}",
+ }
+ except ValueError as e:
+ return {
+ "statusCode": 400,
+ "body": str(e),
+ }
diff --git a/examples/parser/src/multiple_model_parsing.py b/examples/parser/src/multiple_model_parsing.py
deleted file mode 100644
index 556848bbff6..00000000000
--- a/examples/parser/src/multiple_model_parsing.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from typing import Any, Literal, Union
-
-from pydantic import BaseModel, Field
-from typing_extensions import Annotated
-
-from aws_lambda_powertools.utilities.parser import event_parser
-
-
-class Cat(BaseModel):
- animal: Literal["cat"]
- name: str
- meow: int
-
-
-class Dog(BaseModel):
- animal: Literal["dog"]
- name: str
- bark: int
-
-
-Animal = Annotated[
- Union[Cat, Dog],
- Field(discriminator="animal"),
-]
-
-
-@event_parser(model=Animal)
-def lambda_handler(event: Animal, _: Any) -> str:
- if isinstance(event, Cat):
- # we have a cat!
- return f"🐈: {event.name}"
-
- return f"🐶: {event.name}"
diff --git a/examples/parser/src/parser_function.py b/examples/parser/src/parser_function.py
new file mode 100644
index 00000000000..713bc2f5045
--- /dev/null
+++ b/examples/parser/src/parser_function.py
@@ -0,0 +1,19 @@
+from pydantic import BaseModel, ValidationError
+
+from aws_lambda_powertools.utilities.parser import parse
+
+
+# Define a Pydantic model for the expected structure of the input
+class MyEvent(BaseModel):
+ id: int
+ name: str
+
+
+def lambda_handler(event: dict, context):
+ try:
+ # Manually parse the incoming event into MyEvent model
+ parsed_event: MyEvent = parse(model=MyEvent, event=event)
+ return {"statusCode": 200, "body": f"Hello {parsed_event.name}, your ID is {parsed_event.id}"}
+ except ValidationError as e:
+ # Catch validation errors and return a 400 response
+ return {"statusCode": 400, "body": f"Validation error: {str(e)}"}
diff --git a/examples/parser/src/serialization_parser.py b/examples/parser/src/serialization_parser.py
new file mode 100644
index 00000000000..ed6b16ca304
--- /dev/null
+++ b/examples/parser/src/serialization_parser.py
@@ -0,0 +1,41 @@
+from pydantic import BaseModel
+
+from aws_lambda_powertools.logging import Logger
+from aws_lambda_powertools.utilities.parser import parse
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+
+class UserModel(BaseModel):
+ username: str
+ parentid_1: str
+ parentid_2: str
+
+
+def validate_user(event):
+ try:
+ user = parse(model=UserModel, event=event)
+ return {"statusCode": 200, "body": user.model_dump_json()}
+ except Exception as e:
+ logger.exception("Validation error")
+ return {"statusCode": 400, "body": str(e)}
+
+
+@logger.inject_lambda_context
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ logger.info("Received event", extra={"event": event})
+
+ result = validate_user(event)
+
+ if result["statusCode"] == 200:
+ user = UserModel.model_validate_json(result["body"])
+ logger.info("User validated successfully", extra={"username": user.username})
+
+ # Example of serialization
+ user_dict = user.model_dump()
+ user_json = user.model_dump_json()
+
+ logger.debug("User serializations", extra={"dict": user_dict, "json": user_json})
+
+ return result
diff --git a/examples/parser/src/sqs_model_event.json b/examples/parser/src/sqs_model_event.json
new file mode 100644
index 00000000000..08d6e28e0ac
--- /dev/null
+++ b/examples/parser/src/sqs_model_event.json
@@ -0,0 +1,26 @@
+{
+ "Records": [
+ {
+ "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
+ "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...",
+ "body": "Test message hello!",
+ "attributes": {
+ "ApproximateReceiveCount": "1",
+ "SentTimestamp": "1545082649183",
+ "SenderId": "AIDAIENQZJOLO23YVJ4VO",
+ "ApproximateFirstReceiveTimestamp": "1545082649185"
+ },
+ "messageAttributes": {
+ "testAttr": {
+ "stringValue": "100",
+ "binaryValue": "base64Str",
+ "dataType": "Number"
+ }
+ },
+ "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
+ "eventSource": "aws:sqs",
+ "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
+ "awsRegion": "us-east-2"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/examples/parser/src/sqs_model_event.py b/examples/parser/src/sqs_model_event.py
new file mode 100644
index 00000000000..8093a230df6
--- /dev/null
+++ b/examples/parser/src/sqs_model_event.py
@@ -0,0 +1,17 @@
+from aws_lambda_powertools.utilities.parser import parse
+from aws_lambda_powertools.utilities.parser.models import SqsModel
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+def lambda_handler(event: dict, context: LambdaContext) -> list:
+ parsed_event = parse(model=SqsModel, event=event)
+
+ results = []
+ for record in parsed_event.Records:
+ results.append(
+ {
+ "message_id": record.messageId,
+ "body": record.body,
+ },
+ )
+ return results
diff --git a/examples/parser/src/string_fields_contain_json.py b/examples/parser/src/string_fields_contain_json.py
new file mode 100644
index 00000000000..3055bed7e7d
--- /dev/null
+++ b/examples/parser/src/string_fields_contain_json.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from pydantic import BaseModel, Json
+
+from aws_lambda_powertools.utilities.parser import BaseEnvelope, event_parser
+from aws_lambda_powertools.utilities.parser.functions import (
+ _parse_and_validate_event,
+ _retrieve_or_set_model_from_cache,
+)
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.parser.types import T
+
+
+class CancelOrder(BaseModel):
+ order_id: int
+ reason: str
+
+
+class CancelOrderModel(BaseModel):
+ body: Json[CancelOrder]
+
+
+class CustomEnvelope(BaseEnvelope):
+ def parse(self, data: dict[str, Any] | Any | None, model: type[T]):
+ adapter = _retrieve_or_set_model_from_cache(model=model)
+ return _parse_and_validate_event(data=data, adapter=adapter)
+
+
+@event_parser(model=CancelOrderModel, envelope=CustomEnvelope)
+def lambda_handler(event: CancelOrderModel, context: LambdaContext):
+ cancel_order: CancelOrder = event.body
+
+ assert cancel_order.order_id is not None
+
+ # Process the cancel order request
+ print(f"Cancelling order {cancel_order.order_id} for reason: {cancel_order.reason}")
+
+ return {
+ "statusCode": 200,
+ "body": f"Order {cancel_order.order_id} cancelled successfully",
+ }
diff --git a/examples/parser/src/string_fields_contain_json_pydantic_validator.py b/examples/parser/src/string_fields_contain_json_pydantic_validator.py
new file mode 100644
index 00000000000..5c19606736d
--- /dev/null
+++ b/examples/parser/src/string_fields_contain_json_pydantic_validator.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING, Any
+
+from aws_lambda_powertools.utilities.parser import BaseEnvelope, BaseModel, event_parser
+from aws_lambda_powertools.utilities.parser.functions import (
+ _parse_and_validate_event,
+ _retrieve_or_set_model_from_cache,
+)
+from aws_lambda_powertools.utilities.typing import LambdaContext
+from pydantic import field_validator
+
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.parser.types import T
+
+
+class CancelOrder(BaseModel):
+ order_id: int
+ reason: str
+
+
+class CancelOrderModel(BaseModel):
+ body: CancelOrder
+
+    @field_validator("body", mode="before")
+ def transform_body_to_dict(cls, value):
+ return json.loads(value) if isinstance(value, str) else value
+
+
+class CustomEnvelope(BaseEnvelope):
+ def parse(self, data: dict[str, Any] | Any | None, model: type[T]):
+ adapter = _retrieve_or_set_model_from_cache(model=model)
+ return _parse_and_validate_event(data=data, adapter=adapter)
+
+
+@event_parser(model=CancelOrderModel, envelope=CustomEnvelope)
+def lambda_handler(event: CancelOrderModel, context: LambdaContext):
+ cancel_order: CancelOrder = event.body
+
+ assert cancel_order.order_id is not None
+
+ # Process the cancel order request
+ print(f"Cancelling order {cancel_order.order_id} for reason: {cancel_order.reason}")
+
+ return {
+ "statusCode": 200,
+ "body": json.dumps({"message": f"Order {cancel_order.order_id} cancelled successfully"}),
+ }
diff --git a/examples/parser/src/using_the_model_from_event.py b/examples/parser/src/using_the_model_from_event.py
deleted file mode 100644
index 41e3116c61a..00000000000
--- a/examples/parser/src/using_the_model_from_event.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import json
-
-from pydantic import BaseModel, validator
-
-from aws_lambda_powertools.utilities.parser import event_parser
-from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
-from aws_lambda_powertools.utilities.typing import LambdaContext
-
-
-class CancelOrder(BaseModel):
- order_id: int
- reason: str
-
-
-class CancelOrderModel(APIGatewayProxyEventV2Model):
- body: CancelOrder # type: ignore[assignment]
-
- @validator("body", pre=True)
- def transform_body_to_dict(cls, value: str):
- return json.loads(value)
-
-
-@event_parser
-def handler(event: CancelOrderModel, context: LambdaContext):
- cancel_order: CancelOrder = event.body
-
- assert cancel_order.order_id is not None