From fb786c713fa96e35d0205145cf5e03a403f5fd8f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 19 Jan 2024 13:59:00 +0000 Subject: [PATCH 01/32] chore(ci): bump version to 2.32.0 (#3653) Co-authored-by: Powertools for AWS Lambda (Python) bot --- aws_lambda_powertools/shared/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py index 2a25f6a0cb5..adcf3dc5272 100644 --- a/aws_lambda_powertools/shared/version.py +++ b/aws_lambda_powertools/shared/version.py @@ -1,3 +1,3 @@ """Exposes version constant to avoid circular dependencies.""" -VERSION = "2.31.0" +VERSION = "2.32.0" diff --git a/pyproject.toml b/pyproject.toml index af55ba1cfff..23b81905431 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.31.0" +version = "2.32.0" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." 
authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] From acdf1ab1014ef7df8499a80f6747e322b9832382 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 19 Jan 2024 14:01:10 +0000 Subject: [PATCH 02/32] chore(ci): layer docs update (#3654) Co-authored-by: Powertools for AWS Lambda (Python) bot Co-authored-by: Leandro Damascena --- CHANGELOG.md | 38 ++------ docs/index.md | 142 ++++++++++++++--------------- examples/logger/sam/template.yaml | 2 +- examples/metrics/sam/template.yaml | 2 +- examples/tracer/sam/template.yaml | 2 +- 5 files changed, 80 insertions(+), 106 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7fdb78d7b88..b8442ea037e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,43 +4,16 @@ # Unreleased -## Bug Fixes - -* **event_handler:** escape OpenAPI schema on Swagger UI ([#3606](https://github.com/aws-powertools/powertools-lambda-python/issues/3606)) - -## Code Refactoring - -* **event-handler:** Inject CSS and JS files into SwaggerUI route when no custom CDN is used. 
([#3562](https://github.com/aws-powertools/powertools-lambda-python/issues/3562)) -* **event_handler:** fix BedrockAgentResolver docstring ([#3645](https://github.com/aws-powertools/powertools-lambda-python/issues/3645)) - -## Documentation - -* **homepage:** add banner about Python 3.7 deprecation ([#3618](https://github.com/aws-powertools/powertools-lambda-python/issues/3618)) -* **i-made-this:** added new article on how to create a serverless API with CDK and Powertools ([#3605](https://github.com/aws-powertools/powertools-lambda-python/issues/3605)) + +## [v2.32.0] - 2024-01-19 ## Features -* **event_handler:** add support for additional response models ([#3591](https://github.com/aws-powertools/powertools-lambda-python/issues/3591)) -* **event_handler:** add support to download OpenAPI spec file ([#3571](https://github.com/aws-powertools/powertools-lambda-python/issues/3571)) -* **event_source:** Add support for S3 batch operations ([#3572](https://github.com/aws-powertools/powertools-lambda-python/issues/3572)) -* **event_source:** Add support for policyLevel field in CloudWatch Logs event and parser ([#3624](https://github.com/aws-powertools/powertools-lambda-python/issues/3624)) -* **idempotency:** adding redis as idempotency backend ([#2567](https://github.com/aws-powertools/powertools-lambda-python/issues/2567)) +* **idempotency:** leverage new DynamoDB Failed conditional writes behavior with ReturnValuesOnConditionCheckFailure ([#3446](https://github.com/aws-powertools/powertools-lambda-python/issues/3446)) ## Maintenance -* **ci:** update boto3 library version to 1.26.164+ ([#3632](https://github.com/aws-powertools/powertools-lambda-python/issues/3632)) -* **deps:** bump jinja2 from 3.1.2 to 3.1.3 in /docs ([#3620](https://github.com/aws-powertools/powertools-lambda-python/issues/3620)) -* **deps:** bump redis from 4.6.0 to 5.0.1 ([#3613](https://github.com/aws-powertools/powertools-lambda-python/issues/3613)) -* **deps:** bump the layer-balancer group 
in /layer/scripts/layer-balancer with 1 update ([#3639](https://github.com/aws-powertools/powertools-lambda-python/issues/3639)) -* **deps:** bump gitpython from 3.1.37 to 3.1.41 in /docs ([#3610](https://github.com/aws-powertools/powertools-lambda-python/issues/3610)) -* **deps:** bump squidfunk/mkdocs-material from `2f29d71` to `58eef6c` in /docs ([#3633](https://github.com/aws-powertools/powertools-lambda-python/issues/3633)) -* **deps-dev:** bump aws-cdk from 2.118.0 to 2.120.0 ([#3627](https://github.com/aws-powertools/powertools-lambda-python/issues/3627)) -* **deps-dev:** bump sentry-sdk from 1.39.1 to 1.39.2 ([#3614](https://github.com/aws-powertools/powertools-lambda-python/issues/3614)) -* **deps-dev:** bump ruff from 0.1.11 to 0.1.13 ([#3625](https://github.com/aws-powertools/powertools-lambda-python/issues/3625)) -* **deps-dev:** bump aws-cdk from 2.120.0 to 2.121.1 ([#3634](https://github.com/aws-powertools/powertools-lambda-python/issues/3634)) -* **deps-dev:** bump cfn-lint from 0.83.7 to 0.83.8 ([#3603](https://github.com/aws-powertools/powertools-lambda-python/issues/3603)) -* **deps-dev:** bump gitpython from 3.1.40 to 3.1.41 ([#3611](https://github.com/aws-powertools/powertools-lambda-python/issues/3611)) -* **deps-dev:** bump jinja2 from 3.1.2 to 3.1.3 ([#3619](https://github.com/aws-powertools/powertools-lambda-python/issues/3619)) +* version bump @@ -4268,7 +4241,8 @@ * Merge pull request [#5](https://github.com/aws-powertools/powertools-lambda-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.31.0...HEAD +[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.32.0...HEAD +[v2.32.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.31.0...v2.32.0 [v2.31.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.30.2...v2.31.0 [v2.30.2]: 
https://github.com/aws-powertools/powertools-lambda-python/compare/v2.30.1...v2.30.2 [v2.30.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.30.0...v2.30.1 diff --git a/docs/index.md b/docs/index.md index a298004d5f0..7f1ca98fb74 100644 --- a/docs/index.md +++ b/docs/index.md @@ -26,8 +26,8 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles You can install Powertools for AWS Lambda (Python) using one of the following options: -* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: -* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: +* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: +* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: * **Pip**: **[`pip install "aws-lambda-powertools"`](#){: .copyMe}:clipboard:** !!! question "Looking for Pip signed releases? 
[Learn more about verifying signed builds](./security.md#verifying-signed-builds)" @@ -80,67 +80,67 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `ca-west-1` | [arn:aws:lambda:ca-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: 
.copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:59](#){: 
.copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `ca-west-1` | [arn:aws:lambda:ca-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-north-1` | 
[arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:60](#){: .copyMe}:clipboard: | === "arm64" | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: 
.copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-south-2` | 
[arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | 
[arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `eu-west-3` | 
[arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60](#){: .copyMe}:clipboard: | ??? 
note "Note: Click to expand and copy code snippets for popular frameworks" @@ -153,7 +153,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 ``` === "Serverless framework" @@ -163,7 +163,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 ``` === "CDK" @@ -179,7 +179,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -228,7 +228,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -281,7 +281,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? 
Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 ❯ amplify push -y @@ -292,7 +292,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 ? Do you want to edit the local lambda function now? No ``` @@ -306,7 +306,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Properties: Architectures: [arm64] Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60 ``` === "Serverless framework" @@ -317,7 +317,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd handler: lambda_function.lambda_handler architecture: arm64 layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60 ``` === "CDK" @@ -333,7 +333,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59" + 
layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -383,7 +383,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60"] architectures = ["arm64"] source_code_hash = filebase64sha256("lambda_function_payload.zip") @@ -439,7 +439,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60 ❯ amplify push -y @@ -450,7 +450,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:59 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:60 ? Do you want to edit the local lambda function now? No ``` @@ -458,7 +458,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Change {region} to your AWS region, e.g. 
`eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml index d3fc453e09a..9440f37c48f 100644 --- a/examples/logger/sam/template.yaml +++ b/examples/logger/sam/template.yaml @@ -14,7 +14,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 Resources: LoggerLambdaHandlerExample: diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml index 38e78ce18a1..e479ef1732a 100644 --- a/examples/metrics/sam/template.yaml +++ b/examples/metrics/sam/template.yaml @@ -15,7 +15,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 Resources: CaptureLambdaHandlerExample: diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml index ac9bcf6f4d8..e48d27a75ea 100644 --- a/examples/tracer/sam/template.yaml +++ b/examples/tracer/sam/template.yaml @@ -13,7 +13,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub 
arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:59 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:60 Resources: CaptureLambdaHandlerExample: From 5e8937903b700db58ac74df14f88a4d0cb201a88 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 16:30:04 +0000 Subject: [PATCH 03/32] chore(deps): bump pydantic from 1.10.13 to 1.10.14 (#3655) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.13 to 1.10.14. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/v1.10.14/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.13...v1.10.14) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 122 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 70 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index ab030247828..5bc2291b907 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "anyio" @@ -1250,6 +1250,17 @@ files = [ {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, + {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, + {file = 
"ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, @@ -1626,6 +1637,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2207,47 +2228,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = 
"pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = 
"pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = 
"pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -2528,6 +2549,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2535,8 +2557,16 @@ 
files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = 
"PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2553,6 +2583,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2560,6 +2591,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3112,20 +3144,6 @@ files = [ [package.dependencies] types-urllib3 = "*" -[[package]] -name = "types-requests" -version = "2.31.0.20231231" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-requests-2.31.0.20231231.tar.gz", hash = "sha256:0f8c0c9764773384122813548d9eea92a5c4e1f33ed54556b508968ec5065cee"}, - {file = "types_requests-2.31.0.20231231-py3-none-any.whl", hash = "sha256:2e2230c7bc8dd63fa3153c1c0ae335f8a368447f0582fc332f17d54f88e69027"}, -] - -[package.dependencies] -urllib3 = ">=2" - [[package]] name = "types-urllib3" version = "1.26.25.14" From 517acd6726ebf717a458846c59aeb1f247817ecb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 16:35:23 +0000 Subject: [PATCH 04/32] chore(deps-dev): bump ruff from 0.1.13 to 0.1.14 (#3656) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.1.13 to 0.1.14. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.1.13...v0.1.14) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5bc2291b907..ec9ea847df8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2801,28 +2801,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.1.13" +version = "0.1.14" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.13-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e3fd36e0d48aeac672aa850045e784673449ce619afc12823ea7868fcc41d8ba"}, - {file = "ruff-0.1.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9fb6b3b86450d4ec6a6732f9f60c4406061b6851c4b29f944f8c9d91c3611c7a"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b13ba5d7156daaf3fd08b6b993360a96060500aca7e307d95ecbc5bb47a69296"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9ebb40442f7b531e136d334ef0851412410061e65d61ca8ce90d894a094feb22"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226b517f42d59a543d6383cfe03cccf0091e3e0ed1b856c6824be03d2a75d3b6"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f0312ba1061e9b8c724e9a702d3c8621e3c6e6c2c9bd862550ab2951ac75c16"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2f59bcf5217c661254bd6bc42d65a6fd1a8b80c48763cb5c2293295babd945dd"}, - {file = "ruff-0.1.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6894b00495e00c27b6ba61af1fc666f17de6140345e5ef27dd6e08fb987259d"}, - {file = 
"ruff-0.1.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1600942485c6e66119da294c6294856b5c86fd6df591ce293e4a4cc8e72989"}, - {file = "ruff-0.1.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ee3febce7863e231a467f90e681d3d89210b900d49ce88723ce052c8761be8c7"}, - {file = "ruff-0.1.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dcaab50e278ff497ee4d1fe69b29ca0a9a47cd954bb17963628fa417933c6eb1"}, - {file = "ruff-0.1.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f57de973de4edef3ad3044d6a50c02ad9fc2dff0d88587f25f1a48e3f72edf5e"}, - {file = "ruff-0.1.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a36fa90eb12208272a858475ec43ac811ac37e91ef868759770b71bdabe27b6"}, - {file = "ruff-0.1.13-py3-none-win32.whl", hash = "sha256:a623349a505ff768dad6bd57087e2461be8db58305ebd5577bd0e98631f9ae69"}, - {file = "ruff-0.1.13-py3-none-win_amd64.whl", hash = "sha256:f988746e3c3982bea7f824c8fa318ce7f538c4dfefec99cd09c8770bd33e6539"}, - {file = "ruff-0.1.13-py3-none-win_arm64.whl", hash = "sha256:6bbbc3042075871ec17f28864808540a26f0f79a4478c357d3e3d2284e832998"}, - {file = "ruff-0.1.13.tar.gz", hash = "sha256:e261f1baed6291f434ffb1d5c6bd8051d1c2a26958072d38dfbec39b3dda7352"}, + {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, + {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, + {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, + {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, + {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, + {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, + {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, + {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, + {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, + {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, + {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, ] [[package]] @@ -3402,4 +3402,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = 
"6871c1fdddbdf8dd05002be4feb639430863e3c51a00bfdcdead19f255ba0693" +content-hash = "10841362fd6d21eed52d656803e056dcba30a7835c5a00dda8275eaa4e1cc41c" diff --git a/pyproject.toml b/pyproject.toml index 23b81905431..7f04dba78a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,7 +109,7 @@ mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" sentry-sdk = "^1.22.2" -ruff = ">=0.0.272,<0.1.14" +ruff = ">=0.0.272,<0.1.15" retry2 = "^0.9.5" pytest-socket = "^0.6.0" types-redis = "^4.6.0.7" From a194cd2491c3c0d92d58f5cfe49645c042ea5464 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 16:35:47 +0000 Subject: [PATCH 05/32] chore(ci): changelog rebuild (#3658) Co-authored-by: Powertools for AWS Lambda (Python) bot Co-authored-by: Leandro Damascena --- CHANGELOG.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8442ea037e..b28e62b3249 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,16 +4,54 @@ # Unreleased +## Maintenance + + ## [v2.32.0] - 2024-01-19 +## Bug Fixes + +* **event_handler:** escape OpenAPI schema on Swagger UI ([#3606](https://github.com/aws-powertools/powertools-lambda-python/issues/3606)) + +## Code Refactoring + +* **event-handler:** Inject CSS and JS files into SwaggerUI route when no custom CDN is used. 
([#3562](https://github.com/aws-powertools/powertools-lambda-python/issues/3562)) +* **event_handler:** fix BedrockAgentResolver docstring ([#3645](https://github.com/aws-powertools/powertools-lambda-python/issues/3645)) + +## Documentation + +* **homepage:** add banner about Python 3.7 deprecation ([#3618](https://github.com/aws-powertools/powertools-lambda-python/issues/3618)) +* **i-made-this:** added new article on how to create a serverless API with CDK and Powertools ([#3605](https://github.com/aws-powertools/powertools-lambda-python/issues/3605)) + ## Features +* **event_handler:** add support for additional response models ([#3591](https://github.com/aws-powertools/powertools-lambda-python/issues/3591)) +* **event_handler:** add support to download OpenAPI spec file ([#3571](https://github.com/aws-powertools/powertools-lambda-python/issues/3571)) +* **event_source:** Add support for S3 batch operations ([#3572](https://github.com/aws-powertools/powertools-lambda-python/issues/3572)) +* **event_source:** Add support for policyLevel field in CloudWatch Logs event and parser ([#3624](https://github.com/aws-powertools/powertools-lambda-python/issues/3624)) * **idempotency:** leverage new DynamoDB Failed conditional writes behavior with ReturnValuesOnConditionCheckFailure ([#3446](https://github.com/aws-powertools/powertools-lambda-python/issues/3446)) +* **idempotency:** adding redis as idempotency backend ([#2567](https://github.com/aws-powertools/powertools-lambda-python/issues/2567)) ## Maintenance * version bump +* **ci:** Disable Redis e2e until we drop Python 3.7 ([#3652](https://github.com/aws-powertools/powertools-lambda-python/issues/3652)) +* **ci:** update boto3 library version to 1.26.164+ ([#3632](https://github.com/aws-powertools/powertools-lambda-python/issues/3632)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3649](https://github.com/aws-powertools/powertools-lambda-python/issues/3649)) +* 
**deps:** bump jinja2 from 3.1.2 to 3.1.3 in /docs ([#3620](https://github.com/aws-powertools/powertools-lambda-python/issues/3620)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3639](https://github.com/aws-powertools/powertools-lambda-python/issues/3639)) +* **deps:** bump gitpython from 3.1.37 to 3.1.41 in /docs ([#3610](https://github.com/aws-powertools/powertools-lambda-python/issues/3610)) +* **deps:** bump squidfunk/mkdocs-material from `2f29d71` to `58eef6c` in /docs ([#3633](https://github.com/aws-powertools/powertools-lambda-python/issues/3633)) +* **deps:** bump redis from 4.6.0 to 5.0.1 ([#3613](https://github.com/aws-powertools/powertools-lambda-python/issues/3613)) +* **deps-dev:** bump gitpython from 3.1.40 to 3.1.41 ([#3611](https://github.com/aws-powertools/powertools-lambda-python/issues/3611)) +* **deps-dev:** bump sentry-sdk from 1.39.1 to 1.39.2 ([#3614](https://github.com/aws-powertools/powertools-lambda-python/issues/3614)) +* **deps-dev:** bump aws-cdk from 2.120.0 to 2.121.1 ([#3634](https://github.com/aws-powertools/powertools-lambda-python/issues/3634)) +* **deps-dev:** bump jinja2 from 3.1.2 to 3.1.3 ([#3619](https://github.com/aws-powertools/powertools-lambda-python/issues/3619)) +* **deps-dev:** bump cfn-lint from 0.83.7 to 0.83.8 ([#3603](https://github.com/aws-powertools/powertools-lambda-python/issues/3603)) +* **deps-dev:** bump aws-cdk from 2.121.1 to 2.122.0 ([#3648](https://github.com/aws-powertools/powertools-lambda-python/issues/3648)) +* **deps-dev:** bump ruff from 0.1.11 to 0.1.13 ([#3625](https://github.com/aws-powertools/powertools-lambda-python/issues/3625)) +* **deps-dev:** bump aws-cdk from 2.118.0 to 2.120.0 ([#3627](https://github.com/aws-powertools/powertools-lambda-python/issues/3627)) From 8b5341c38af6233c2d3e019c50891b31441ddf61 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 09:04:00 
+0000 Subject: [PATCH 06/32] chore(ci): changelog rebuild (#3659) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b28e62b3249..7b3370a95e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ ## Maintenance +* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) From 2a509d0c32cde2fdbf50e76b6214e7419133ea15 Mon Sep 17 00:00:00 2001 From: Martti Aukia <122444627+maauk@users.noreply.github.com> Date: Mon, 22 Jan 2024 18:14:07 +0200 Subject: [PATCH 07/32] docs(metrics): fix empty metric warning filter (#3660) --- docs/core/metrics.md | 2 +- docs/core/metrics/datadog.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 31b4ea99ce7..19a34cf21ad 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -165,7 +165,7 @@ If you want to ensure at least one metric is always emitted, you can pass `raise ``` ???+ tip "Suppressing warning messages on empty metrics" - If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No metrics to publish*")`**. + If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No application metrics to publish*")`**. 
### Capturing cold start metric diff --git a/docs/core/metrics/datadog.md b/docs/core/metrics/datadog.md index eb036fd3270..ecbdf93f7f8 100644 --- a/docs/core/metrics/datadog.md +++ b/docs/core/metrics/datadog.md @@ -142,7 +142,7 @@ Use `raise_on_empty_metrics=True` if you want to ensure at least one metric is a ``` ???+ tip "Suppressing warning messages on empty metrics" - If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No metrics to publish*")`**. + If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No application metrics to publish*")`**. ### Capturing cold start metric From 4a43a5051ab266fb6dc8043b4ef3cf77555c4b9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Jan 2024 00:08:01 +0000 Subject: [PATCH 08/32] chore(deps): bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update (#3665) chore(deps): bump the layer-balancer group Bumps the layer-balancer group in /layer/scripts/layer-balancer with 1 update: [github.com/aws/aws-sdk-go-v2/config](https://github.com/aws/aws-sdk-go-v2). Updates `github.com/aws/aws-sdk-go-v2/config` from 1.26.5 to 1.26.6 - [Release notes](https://github.com/aws/aws-sdk-go-v2/releases) - [Commits](https://github.com/aws/aws-sdk-go-v2/compare/config/v1.26.5...config/v1.26.6) --- updated-dependencies: - dependency-name: github.com/aws/aws-sdk-go-v2/config dependency-type: direct:production update-type: version-update:semver-patch dependency-group: layer-balancer ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- layer/scripts/layer-balancer/go.mod | 4 ++-- layer/scripts/layer-balancer/go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/layer/scripts/layer-balancer/go.mod b/layer/scripts/layer-balancer/go.mod index dfeb6a67fa1..4548c3f44f2 100644 --- a/layer/scripts/layer-balancer/go.mod +++ b/layer/scripts/layer-balancer/go.mod @@ -4,7 +4,7 @@ go 1.18 require ( github.com/aws/aws-sdk-go-v2 v1.24.1 - github.com/aws/aws-sdk-go-v2/config v1.26.5 + github.com/aws/aws-sdk-go-v2/config v1.26.6 github.com/aws/aws-sdk-go-v2/service/lambda v1.49.7 golang.org/x/exp v0.0.0-20230321023759-10a507213a29 golang.org/x/sync v0.6.0 @@ -16,7 +16,7 @@ require ( github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.11 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.7.2 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.10 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.18.7 // indirect diff --git a/layer/scripts/layer-balancer/go.sum b/layer/scripts/layer-balancer/go.sum index fdaf9f31ef7..ec29fc3ea00 100644 --- a/layer/scripts/layer-balancer/go.sum +++ b/layer/scripts/layer-balancer/go.sum @@ -2,8 +2,8 @@ github.com/aws/aws-sdk-go-v2 v1.24.1 h1:xAojnj+ktS95YZlDf0zxWBkbFtymPeDP+rvUQIH3 github.com/aws/aws-sdk-go-v2 v1.24.1/go.mod h1:LNh45Br1YAkEKaAqvmE1m8FUx6a5b/V0oAKV7of29b4= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.5.4 h1:OCs21ST2LrepDfD3lwlQiOqIGp6JiEUqG84GzTDoyJs= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.5.4/go.mod h1:usURWEKSNNAcAZuzRn/9ZYPT8aZQkR7xcCtunK/LkJo= 
-github.com/aws/aws-sdk-go-v2/config v1.26.5 h1:lodGSevz7d+kkFJodfauThRxK9mdJbyutUxGq1NNhvw= -github.com/aws/aws-sdk-go-v2/config v1.26.5/go.mod h1:DxHrz6diQJOc9EwDslVRh84VjjrE17g+pVZXUeSxaDU= +github.com/aws/aws-sdk-go-v2/config v1.26.6 h1:Z/7w9bUqlRI0FFQpetVuFYEsjzE3h7fpU6HuGmfPL/o= +github.com/aws/aws-sdk-go-v2/config v1.26.6/go.mod h1:uKU6cnDmYCvJ+pxO9S4cWDb2yWWIH5hra+32hVh1MI4= github.com/aws/aws-sdk-go-v2/credentials v1.16.16 h1:8q6Rliyv0aUFAVtzaldUEcS+T5gbadPbWdV1WcAddK8= github.com/aws/aws-sdk-go-v2/credentials v1.16.16/go.mod h1:UHVZrdUsv63hPXFo1H7c5fEneoVo9UXiz36QG1GEPi0= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.11 h1:c5I5iH+DZcH3xOIMlz3/tCKJDaHFwYEmxvlh2fAcFo8= @@ -12,8 +12,8 @@ github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10 h1:vF+Zgd9s+H4vOXd5B github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10/go.mod h1:6BkRjejp/GR4411UGqkX8+wFMbFbqsUIimfK4XjOKR4= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10 h1:nYPe006ktcqUji8S2mqXf9c/7NdiKriOwMvWQHgYztw= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10/go.mod h1:6UV4SZkVvmODfXKql4LCbaZUpF7HO2BX38FgBf9ZOLw= -github.com/aws/aws-sdk-go-v2/internal/ini v1.7.2 h1:GrSw8s0Gs/5zZ0SX+gX4zQjRnRsMJDJ2sLur1gRBhEM= -github.com/aws/aws-sdk-go-v2/internal/ini v1.7.2/go.mod h1:6fQQgfuGmw8Al/3M2IgIllycxV7ZW7WCdVSqfBeUiCY= +github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3 h1:n3GDfwqF2tzEkXlv5cuy4iy7LpKDtqDMcNLfZDu9rls= +github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3/go.mod h1:6fQQgfuGmw8Al/3M2IgIllycxV7ZW7WCdVSqfBeUiCY= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4 h1:/b31bi3YVNlkzkBrm9LfpaKoaYZUxIAj4sHfOTmLfqw= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4/go.mod h1:2aGXHFmbInwgP9ZfpmdIfOELL79zhdNYNmReK8qDfdQ= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.10 h1:DBYTXwIGQSGs9w4jKm60F5dmCQ3EEruxdc0MFh+3EY4= From f98ead0f06892dd9832c927bf21c83185909052f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" 
<41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 23 Jan 2024 09:54:46 +0000 Subject: [PATCH 09/32] chore(ci): changelog rebuild (#3666) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b3370a95e0..75cb3166a63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,9 +4,14 @@ # Unreleased +## Documentation + +* **metrics:** fix empty metric warning filter ([#3660](https://github.com/aws-powertools/powertools-lambda-python/issues/3660)) + ## Maintenance * **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) * **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) From 0519fa37049a0ebc4df634317d575abe17598d6a Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 23 Jan 2024 15:09:09 +0000 Subject: [PATCH 10/32] feat(event_handler): add support for multiValueQueryStringParameters in OpenAPI schema (#3667) * Initial code for multivalue querystring * Adding tests and improving code * Adding tests and improving code * Refactoging to avoid abstraction leaky * Making Pydanticv2 happy * Adding documentation * Addressing Ruben's feedback * Addressing Ruben's feedback * Mypy.... 
--- .../middlewares/openapi_validation.py | 35 ++- .../utilities/data_classes/alb_event.py | 9 +- .../data_classes/api_gateway_proxy_event.py | 17 ++ .../data_classes/bedrock_agent_event.py | 4 + .../utilities/data_classes/common.py | 11 + .../utilities/data_classes/vpc_lattice.py | 8 + docs/core/event_handler/api_gateway.md | 10 + .../src/working_with_multi_query_values.py | 34 +++ .../events/albMultiValueQueryStringEvent.json | 38 +++ .../lambdaFunctionUrlEventWithHeaders.json | 51 ++++ .../events/vpcLatticeV2EventWithHeaders.json | 36 +++ .../event_handler/test_openapi_params.py | 14 + .../test_openapi_validation_middleware.py | 281 +++++++++++++++++- 13 files changed, 544 insertions(+), 4 deletions(-) create mode 100644 examples/event_handler_rest/src/working_with_multi_query_values.py create mode 100644 tests/events/albMultiValueQueryStringEvent.json create mode 100644 tests/events/lambdaFunctionUrlEventWithHeaders.json create mode 100644 tests/events/vpcLatticeV2EventWithHeaders.json diff --git a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py index 34011b64384..e819947b147 100644 --- a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py +++ b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py @@ -16,6 +16,7 @@ _regenerate_error_with_loc, get_missing_field_error, ) +from aws_lambda_powertools.event_handler.openapi.dependant import is_scalar_field from aws_lambda_powertools.event_handler.openapi.encoders import jsonable_encoder from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError from aws_lambda_powertools.event_handler.openapi.params import Param @@ -68,10 +69,16 @@ def handler(self, app: EventHandlerInstance, next_middleware: NextMiddleware) -> app.context["_route_args"], ) + # Normalize query values before validate this + query_string = _normalize_multi_query_string_with_param( + 
app.current_event.resolved_query_string_parameters, + route.dependant.query_params, + ) + # Process query values query_values, query_errors = _request_params_to_args( route.dependant.query_params, - app.current_event.query_string_parameters or {}, + query_string, ) values.update(path_values) @@ -344,3 +351,29 @@ def _get_embed_body( received_body = {field.alias: received_body} return received_body, field_alias_omitted + + +def _normalize_multi_query_string_with_param(query_string: Optional[Dict[str, str]], params: Sequence[ModelField]): + """ + Extract and normalize resolved_query_string_parameters + + Parameters + ---------- + query_string: Dict + A dictionary containing the initial query string parameters. + params: Sequence[ModelField] + A sequence of ModelField objects representing parameters. + + Returns + ------- + A dictionary containing the processed multi_query_string_parameters. + """ + if query_string: + for param in filter(is_scalar_field, params): + try: + # if the target parameter is a scalar, we keep the first value of the query string + # regardless if there are more in the payload + query_string[param.name] = query_string[param.name][0] + except KeyError: + pass + return query_string diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 51a6f61f368..688c9567efa 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional from aws_lambda_powertools.shared.headers_serializer import ( BaseHeadersSerializer, @@ -35,6 +35,13 @@ def request_context(self) -> ALBEventRequestContext: def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueQueryStringParameters") + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: + if 
self.multi_value_query_string_parameters: + return self.multi_value_query_string_parameters + + return self.query_string_parameters + @property def multi_value_headers(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueHeaders") diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index 5c2ef12e62c..9e013eac038 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -118,6 +118,13 @@ def multi_value_headers(self) -> Dict[str, List[str]]: def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueQueryStringParameters") + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: + if self.multi_value_query_string_parameters: + return self.multi_value_query_string_parameters + + return self.query_string_parameters + @property def request_context(self) -> APIGatewayEventRequestContext: return APIGatewayEventRequestContext(self._data) @@ -299,3 +306,13 @@ def http_method(self) -> str: def header_serializer(self): return HttpApiHeadersSerializer() + + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: + if self.query_string_parameters is not None: + query_string = { + key: value.split(",") if "," in value else value for key, value in self.query_string_parameters.items() + } + return query_string + + return {} diff --git a/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py b/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py index 9534af0e7f6..d9b45242376 100644 --- a/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py +++ b/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py @@ -108,3 +108,7 @@ def query_string_parameters(self) -> Optional[Dict[str, str]]: # In Bedrock 
Agent events, query string parameters are passed as undifferentiated parameters, # together with the other parameters. So we just return all parameters here. return {x["name"]: x["value"] for x in self["parameters"]} if self.get("parameters") else None + + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: + return self.query_string_parameters diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index 28229c21a62..d2cf57d4af5 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -103,6 +103,17 @@ def headers(self) -> Dict[str, str]: def query_string_parameters(self) -> Optional[Dict[str, str]]: return self.get("queryStringParameters") + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: + """ + This property determines the appropriate query string parameter to be used + as a trusted source for validating OpenAPI. + + This is necessary because different resolvers use different formats to encode + multi query string parameters. 
+ """ + return self.query_string_parameters + @property def is_base64_encoded(self) -> Optional[bool]: return self.get("isBase64Encoded") diff --git a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py index 00ba5136eec..633ce068f6e 100644 --- a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py +++ b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py @@ -141,6 +141,10 @@ def query_string_parameters(self) -> Dict[str, str]: """The request query string parameters.""" return self["query_string_parameters"] + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: + return self.query_string_parameters + class vpcLatticeEventV2Identity(DictWrapper): @property @@ -251,3 +255,7 @@ def request_context(self) -> vpcLatticeEventV2RequestContext: def query_string_parameters(self) -> Optional[Dict[str, str]]: """The request query string parameters.""" return self.get("queryStringParameters") + + @property + def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: + return self.query_string_parameters diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index a34a94975bc..86b97c87e4b 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -400,6 +400,16 @@ In the following example, we use a new `Query` OpenAPI type to add [one out of m 1. `completed` is still the same query string as before, except we simply state it's an string. No `Query` or `Annotated` to validate it. +=== "working_with_multi_query_values.py" + + If you need to handle multi-value query parameters, you can create a list of the desired type. + + ```python hl_lines="23" + --8<-- "examples/event_handler_rest/src/working_with_multi_query_values.py" + ``` + + 1. `example_multi_value_param` is a list containing values from the `ExampleEnum` enumeration. 
+ #### Validating path parameters diff --git a/examples/event_handler_rest/src/working_with_multi_query_values.py b/examples/event_handler_rest/src/working_with_multi_query_values.py new file mode 100644 index 00000000000..7f6049dad46 --- /dev/null +++ b/examples/event_handler_rest/src/working_with_multi_query_values.py @@ -0,0 +1,34 @@ +from enum import Enum +from typing import List + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.openapi.params import Query +from aws_lambda_powertools.shared.types import Annotated +from aws_lambda_powertools.utilities.typing import LambdaContext + +app = APIGatewayRestResolver(enable_validation=True) + + +class ExampleEnum(Enum): + """Example of an Enum class.""" + + ONE = "value_one" + TWO = "value_two" + THREE = "value_three" + + +@app.get("/todos") +def get( + example_multi_value_param: Annotated[ + List[ExampleEnum], # (1)! + Query( + description="This is multi value query parameter.", + ), + ], +): + """Return validated multi-value param values.""" + return example_multi_value_param + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/events/albMultiValueQueryStringEvent.json b/tests/events/albMultiValueQueryStringEvent.json new file mode 100644 index 00000000000..4584ba7c477 --- /dev/null +++ b/tests/events/albMultiValueQueryStringEvent.json @@ -0,0 +1,38 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:eu-central-1:1234567890:targetgroup/alb-c-Targe-11GDXTPQ7663S/804a67588bfdc10f" + } + }, + "httpMethod": "GET", + "path": "/todos", + "multiValueQueryStringParameters": { + "parameter1": ["value1","value2"], + "parameter2": ["value"] + }, + "multiValueHeaders": { + "accept": [ + "*/*" + ], + "host": [ + "alb-c-LoadB-14POFKYCLBNSF-1815800096.eu-central-1.elb.amazonaws.com" + ], + "user-agent": [ + "curl/7.79.1" + ], + "x-amzn-trace-id": [ + 
"Root=1-62fa9327-21cdd4da4c6db451490a5fb7" + ], + "x-forwarded-for": [ + "123.123.123.123" + ], + "x-forwarded-port": [ + "80" + ], + "x-forwarded-proto": [ + "http" + ] + }, + "body": "", + "isBase64Encoded": false +} diff --git a/tests/events/lambdaFunctionUrlEventWithHeaders.json b/tests/events/lambdaFunctionUrlEventWithHeaders.json new file mode 100644 index 00000000000..e453690d9b3 --- /dev/null +++ b/tests/events/lambdaFunctionUrlEventWithHeaders.json @@ -0,0 +1,51 @@ +{ + "version":"2.0", + "routeKey":"$default", + "rawPath":"/", + "rawQueryString":"", + "headers":{ + "sec-fetch-mode":"navigate", + "x-amzn-tls-version":"TLSv1.2", + "sec-fetch-site":"cross-site", + "accept-language":"pt-BR,pt;q=0.9", + "x-forwarded-proto":"https", + "x-forwarded-port":"443", + "x-forwarded-for":"123.123.123.123", + "sec-fetch-user":"?1", + "accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "x-amzn-tls-cipher-suite":"ECDHE-RSA-AES128-GCM-SHA256", + "sec-ch-ua":"\" Not A;Brand\";v=\"99\", \"Chromium\";v=\"102\", \"Google Chrome\";v=\"102\"", + "sec-ch-ua-mobile":"?0", + "x-amzn-trace-id":"Root=1-62ecd163-5f302e550dcde3b12402207d", + "sec-ch-ua-platform":"\"Linux\"", + "host":".lambda-url.us-east-1.on.aws", + "upgrade-insecure-requests":"1", + "cache-control":"max-age=0", + "accept-encoding":"gzip, deflate, br", + "sec-fetch-dest":"document", + "user-agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36" + }, + "queryStringParameters": { + "parameter1": "value1,value2", + "parameter2": "value" + }, + "requestContext":{ + "accountId":"anonymous", + "apiId":"", + "domainName":".lambda-url.us-east-1.on.aws", + "domainPrefix":"", + "http":{ + "method":"GET", + "path":"/", + "protocol":"HTTP/1.1", + "sourceIp":"123.123.123.123", + "userAgent":"agent" + }, + "requestId":"id", + "routeKey":"$default", + "stage":"$default", + 
"time":"05/Aug/2022:08:14:39 +0000", + "timeEpoch":1659687279885 + }, + "isBase64Encoded":false +} diff --git a/tests/events/vpcLatticeV2EventWithHeaders.json b/tests/events/vpcLatticeV2EventWithHeaders.json new file mode 100644 index 00000000000..11b36ef118b --- /dev/null +++ b/tests/events/vpcLatticeV2EventWithHeaders.json @@ -0,0 +1,36 @@ +{ + "version": "2.0", + "path": "/newpath", + "method": "GET", + "headers": { + "user_agent": "curl/7.64.1", + "x-forwarded-for": "10.213.229.10", + "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws", + "accept": "*/*" + }, + "queryStringParameters": { + "parameter1": [ + "value1", + "value2" + ], + "parameter2": [ + "value" + ] + }, + "body": "{\"message\": \"Hello from Lambda!\"}", + "isBase64Encoded": false, + "requestContext": { + "serviceNetworkArn": "arn:aws:vpc-lattice:us-east-2:123456789012:servicenetwork/sn-0bf3f2882e9cc805a", + "serviceArn": "arn:aws:vpc-lattice:us-east-2:123456789012:service/svc-0a40eebed65f8d69c", + "targetGroupArn": "arn:aws:vpc-lattice:us-east-2:123456789012:targetgroup/tg-6d0ecf831eec9f09", + "identity": { + "sourceVpcArn": "arn:aws:ec2:region:123456789012:vpc/vpc-0b8276c84697e7339", + "type" : "AWS_IAM", + "principal": "arn:aws:sts::123456789012:assumed-role/example-role/057d00f8b51257ba3c853a0f248943cf", + "sessionName": "057d00f8b51257ba3c853a0f248943cf", + "x509SanDns": "example.com" + }, + "region": "us-east-2", + "timeEpoch": "1696331543569073" + } +} diff --git a/tests/functional/event_handler/test_openapi_params.py b/tests/functional/event_handler/test_openapi_params.py index 0f06524ea6d..2f48f5aa534 100644 --- a/tests/functional/event_handler/test_openapi_params.py +++ b/tests/functional/event_handler/test_openapi_params.py @@ -184,6 +184,20 @@ def handler(page: Annotated[str, Query(include_in_schema=False)]): assert get.parameters is None +def test_openapi_with_list_param(): + app = APIGatewayRestResolver() + + @app.get("/") + def handler(page: 
Annotated[List[str], Query()]): + return page + + schema = app.get_openapi_schema() + assert len(schema.paths.keys()) == 1 + + get = schema.paths["/"].get + assert get.parameters[0].schema_.type == "array" + + def test_openapi_with_description(): app = APIGatewayRestResolver() diff --git a/tests/functional/event_handler/test_openapi_validation_middleware.py b/tests/functional/event_handler/test_openapi_validation_middleware.py index f558bd23ced..ea4305257d4 100644 --- a/tests/functional/event_handler/test_openapi_validation_middleware.py +++ b/tests/functional/event_handler/test_openapi_validation_middleware.py @@ -6,12 +6,23 @@ from pydantic import BaseModel -from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response -from aws_lambda_powertools.event_handler.openapi.params import Body +from aws_lambda_powertools.event_handler import ( + ALBResolver, + APIGatewayHttpResolver, + APIGatewayRestResolver, + LambdaFunctionUrlResolver, + Response, + VPCLatticeV2Resolver, +) +from aws_lambda_powertools.event_handler.openapi.params import Body, Query from aws_lambda_powertools.shared.types import Annotated from tests.functional.utils import load_event LOAD_GW_EVENT = load_event("apiGatewayProxyEvent.json") +LOAD_GW_EVENT_HTTP = load_event("apiGatewayProxyV2Event.json") +LOAD_GW_EVENT_ALB = load_event("albMultiValueQueryStringEvent.json") +LOAD_GW_EVENT_LAMBDA_URL = load_event("lambdaFunctionUrlEventWithHeaders.json") +LOAD_GW_EVENT_VPC_LATTICE = load_event("vpcLatticeV2EventWithHeaders.json") def test_validate_scalars(): @@ -378,3 +389,269 @@ def handler(user: Model) -> Response[Model]: result = app(LOAD_GW_EVENT, {}) assert result["statusCode"] == 422 assert "missing" in result["body"] + + +def test_validate_rest_api_resolver_with_multi_query_params(): + # GIVEN an APIGatewayRestResolver with validation enabled + app = APIGatewayRestResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list + 
@app.get("/users") + def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT["httpMethod"] = "GET" + LOAD_GW_EVENT["path"] = "/users" + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT, {}) + assert result["statusCode"] == 200 + + +def test_validate_rest_api_resolver_with_multi_query_params_fail(): + # GIVEN an APIGatewayRestResolver with validation enabled + app = APIGatewayRestResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT["httpMethod"] = "GET" + LOAD_GW_EVENT["path"] = "/users" + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT, {}) + assert result["statusCode"] == 422 + assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + + +def test_validate_rest_api_resolver_without_query_params(): + # GIVEN an APIGatewayRestResolver with validation enabled + app = APIGatewayRestResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(): + return None + + LOAD_GW_EVENT["httpMethod"] = "GET" + LOAD_GW_EVENT["path"] = "/users" + LOAD_GW_EVENT["queryStringParameters"] = None + LOAD_GW_EVENT["multiValueQueryStringParameters"] = None + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT, {}) + assert result["statusCode"] == 200 + + +def test_validate_http_resolver_with_multi_query_params(): + # GIVEN an APIGatewayHttpResolver with validation enabled + app = APIGatewayHttpResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list + @app.get("/users") + def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + 
print(parameter2) + + LOAD_GW_EVENT_HTTP["rawPath"] = "/users" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_HTTP, {}) + assert result["statusCode"] == 200 + + +def test_validate_http_resolver_with_multi_query_values_fail(): + # GIVEN an APIGatewayHttpResolver with validation enabled + app = APIGatewayHttpResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_HTTP["rawPath"] = "/users" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT_HTTP, {}) + assert result["statusCode"] == 422 + assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + + +def test_validate_http_resolver_without_query_params(): + # GIVEN an APIGatewayHttpResolver with validation enabled + app = APIGatewayHttpResolver(enable_validation=True) + + # WHEN a handler is defined without any query params + @app.get("/users") + def handler(): + return None + + LOAD_GW_EVENT_HTTP["rawPath"] = "/users" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + LOAD_GW_EVENT_HTTP["queryStringParameters"] = None + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_HTTP, {}) + assert result["statusCode"] == 200 + + +def test_validate_alb_resolver_with_multi_query_values(): + # GIVEN an ALBResolver with validation enabled + app = ALBResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list + 
@app.get("/users") + def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_ALB["path"] = "/users" + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_ALB, {}) + assert result["statusCode"] == 200 + + +def test_validate_alb_resolver_with_multi_query_values_fail(): + # GIVEN an ALBResolver with validation enabled + app = ALBResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_ALB["path"] = "/users" + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT_ALB, {}) + assert result["statusCode"] == 422 + assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + + +def test_validate_alb_resolver_without_query_params(): + # GIVEN an ALBResolver with validation enabled + app = ALBResolver(enable_validation=True) + + # WHEN a handler is defined without any query params + @app.get("/users") + def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_ALB["path"] = "/users" + LOAD_GW_EVENT_HTTP["multiValueQueryStringParameters"] = None + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_ALB, {}) + assert result["statusCode"] == 200 + + +def test_validate_lambda_url_resolver_with_multi_query_params(): + # GIVEN an LambdaFunctionUrlResolver with validation enabled + app = LambdaFunctionUrlResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list + @app.get("/users") + def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" + 
LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) + assert result["statusCode"] == 200 + + +def test_validate_lambda_url_resolver_with_multi_query_params_fail(): + # GIVEN an LambdaFunctionUrlResolver with validation enabled + app = LambdaFunctionUrlResolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) + assert result["statusCode"] == 422 + assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + + +def test_validate_lambda_url_resolver_without_query_params(): + # GIVEN an LambdaFunctionUrlResolver with validation enabled + app = LambdaFunctionUrlResolver(enable_validation=True) + + # WHEN a handler is defined without any query params + @app.get("/users") + def handler(): + return None + + LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + LOAD_GW_EVENT_LAMBDA_URL["queryStringParameters"] = None + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) + assert result["statusCode"] == 200 + + +def test_validate_vpc_lattice_resolver_with_multi_params_values(): + # GIVEN an VPCLatticeV2Resolver with validation enabled + app = VPCLatticeV2Resolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list + @app.get("/users") + 
def handler(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) + assert result["statusCode"] == 200 + + +def test_validate_vpc_lattice_resolver_with_multi_query_params_fail(): + # GIVEN an VPCLatticeV2Resolver with validation enabled + app = VPCLatticeV2Resolver(enable_validation=True) + + # WHEN a handler is defined with a default scalar parameter and a list with wrong type + @app.get("/users") + def handler(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + + # THEN the handler should be invoked and return 422 + result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) + assert result["statusCode"] == 422 + assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + + +def test_validate_vpc_lattice_resolver_without_query_params(): + # GIVEN an VPCLatticeV2Resolver with validation enabled + app = VPCLatticeV2Resolver(enable_validation=True) + + # WHEN a handler is defined without any query params + @app.get("/users") + def handler(): + return None + + LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + LOAD_GW_EVENT_VPC_LATTICE["queryStringParameters"] = None + + # THEN the handler should be invoked and return 200 + result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) + assert result["statusCode"] == 200 From e1ca5842ad828588fcf2af538fdc88404418cf97 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 24 Jan 2024 08:51:26 +0000 Subject: [PATCH 11/32] chore(ci): changelog rebuild (#3669) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 75cb3166a63..9776e3cbe93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,10 @@ * 
**metrics:** fix empty metric warning filter ([#3660](https://github.com/aws-powertools/powertools-lambda-python/issues/3660)) +## Features + +* **event_handler:** add support for multiValueQueryStringParameters in OpenAPI schema ([#3667](https://github.com/aws-powertools/powertools-lambda-python/issues/3667)) + ## Maintenance * **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) From aac14dc277f57cfb05ef6d37bca19d3756cb7632 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jan 2024 10:31:04 +0000 Subject: [PATCH 12/32] chore(deps): bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs (#3670) chore(deps): bump squidfunk/mkdocs-material in /docs Bumps squidfunk/mkdocs-material from `58eef6c` to `9aad7af`. --- updated-dependencies: - dependency-name: squidfunk/mkdocs-material dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Dockerfile b/docs/Dockerfile index 1da6d6b491c..ce3cae8a9a3 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -1,5 +1,5 @@ # v9.1.18 -FROM squidfunk/mkdocs-material@sha256:58eef6c68ad4c8687f7d43c560852a8f62d403126c90c919242dec93bd3eee68 +FROM squidfunk/mkdocs-material@sha256:9aad7af2f62950826f57928e984ea8aa77a561f67b7f5fc251ced67d52a2a5fe # pip-compile --generate-hashes --output-file=requirements.txt requirements.in COPY requirements.txt /tmp/ RUN pip install --require-hashes -r /tmp/requirements.txt From 83ba2d154183a111a73e39110e9d66ff2d70b7c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jan 2024 22:14:42 +0000 Subject: [PATCH 13/32] chore(deps-dev): bump aws-cdk from 2.122.0 to 2.123.0 (#3673) Bumps 
[aws-cdk](https://github.com/aws/aws-cdk/tree/HEAD/packages/aws-cdk) from 2.122.0 to 2.123.0. - [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/commits/v2.123.0/packages/aws-cdk) --- updated-dependencies: - dependency-name: aws-cdk dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 9c1f3cb3ee8..24659a4e69b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "package-lock.json": "^1.0.0" }, "devDependencies": { - "aws-cdk": "^2.122.0" + "aws-cdk": "^2.123.0" } }, "node_modules/aws-cdk": { - "version": "2.122.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.122.0.tgz", - "integrity": "sha512-WqiVTedcuW4LjH4WqtQncliUdeDa9j9xgu3II8Qd1HmCZotbzBorYIHDvOJ+m3ovIzd9DL+hNq9PPUqxtBe0VQ==", + "version": "2.123.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.123.0.tgz", + "integrity": "sha512-JvGNN1FobSaGwirJJQZ1oIkaHFfQoLbRyuxzFNQSs2wlVltwFb1VdR7FNxh0sVzugM2RsYQu8xQPUa53ZnDlyg==", "dev": true, "bin": { "cdk": "bin/cdk" diff --git a/package.json b/package.json index b2065da493f..cfa167558d4 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.122.0" + "aws-cdk": "^2.123.0" }, "dependencies": { "package-lock.json": "^1.0.0" From d112af7a1d146f96983712004b156afc34b021b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jan 2024 22:15:12 +0000 Subject: [PATCH 14/32] chore(deps): bump codecov/codecov-action from 3.1.4 to 3.1.5 
(#3674) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/eaaf4bedf32dbdc6b720b63067d99c4d77d6047d...4fe8c5f003fae66aa5ebb77cfd3e7bfbbda0b6b0) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- .github/workflows/quality_check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml index 06912442553..db54c571509 100644 --- a/.github/workflows/quality_check.yml +++ b/.github/workflows/quality_check.yml @@ -71,7 +71,7 @@ jobs: - name: Complexity baseline run: make complexity-baseline - name: Upload coverage to Codecov - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # 3.1.4 + uses: codecov/codecov-action@4fe8c5f003fae66aa5ebb77cfd3e7bfbbda0b6b0 # 3.1.5 with: file: ./coverage.xml env_vars: PYTHON From 3268c71f5e0498b60198b0422c1346c1570b05b2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 26 Jan 2024 09:21:00 +0000 Subject: [PATCH 15/32] chore(ci): changelog rebuild (#3675) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9776e3cbe93..e0fbb34b687 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,8 +14,11 @@ ## Maintenance +* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) 
+* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) * **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) * **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) +* **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) * **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) From b2c1047e11bd8f39e0a0f7a877e21c963eb6d91f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 27 Jan 2024 17:26:39 +0000 Subject: [PATCH 16/32] chore(ci): changelog rebuild (#3676) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0fbb34b687..9342503c3ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,8 +15,8 @@ ## Maintenance * **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) -* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) * **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) * **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) * 
**deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) * **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) From a7c2a7506d48000fad6862a14ceae23adae5800d Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 29 Jan 2024 16:47:44 +0000 Subject: [PATCH 17/32] fix(event-handler): strip whitespace from Content-Type headers during OpenAPI schema validation (#3677) Fixing problems with spaces in header --- .../middlewares/openapi_validation.py | 2 +- .../test_openapi_validation_middleware.py | 26 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py index e819947b147..fd7507603de 100644 --- a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py +++ b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py @@ -212,7 +212,7 @@ def _get_body(self, app: EventHandlerInstance) -> Dict[str, Any]: """ content_type_value = app.current_event.get_header_value("content-type") - if not content_type_value or content_type_value.startswith("application/json"): + if not content_type_value or content_type_value.strip().startswith("application/json"): try: return app.current_event.json_body except json.JSONDecodeError as e: diff --git a/tests/functional/event_handler/test_openapi_validation_middleware.py b/tests/functional/event_handler/test_openapi_validation_middleware.py index ea4305257d4..23fa131ab9f 100644 --- a/tests/functional/event_handler/test_openapi_validation_middleware.py +++ b/tests/functional/event_handler/test_openapi_validation_middleware.py @@ -289,6 +289,32 @@ def handler(user: Model) -> Model: assert json.loads(result["body"]) == {"name": "John", "age": 30} +def test_validate_body_param_with_stripped_headers(): + # 
GIVEN an APIGatewayRestResolver with validation enabled + app = APIGatewayRestResolver(enable_validation=True) + + class Model(BaseModel): + name: str + age: int + + # WHEN a handler is defined with a body parameter + # WHEN headers has spaces + @app.post("/") + def handler(user: Model) -> Model: + return user + + LOAD_GW_EVENT["httpMethod"] = "POST" + LOAD_GW_EVENT["headers"] = {"Content-type": " application/json "} + LOAD_GW_EVENT["path"] = "/" + LOAD_GW_EVENT["body"] = json.dumps({"name": "John", "age": 30}) + + # THEN the handler should be invoked and return 200 + # THEN the body must be a JSON object + result = app(LOAD_GW_EVENT, {}) + assert result["statusCode"] == 200 + assert json.loads(result["body"]) == {"name": "John", "age": 30} + + def test_validate_body_param_with_invalid_date(): # GIVEN an APIGatewayRestResolver with validation enabled app = APIGatewayRestResolver(enable_validation=True) From e8daf9e9381da0841709f60c9b8a7dd27b687482 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 23:05:29 +0000 Subject: [PATCH 18/32] chore(deps): bump squidfunk/mkdocs-material from `9aad7af` to `a4a2029` in /docs (#3679) chore(deps): bump squidfunk/mkdocs-material in /docs Bumps squidfunk/mkdocs-material from `9aad7af` to `a4a2029`. --- updated-dependencies: - dependency-name: squidfunk/mkdocs-material dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Dockerfile b/docs/Dockerfile index ce3cae8a9a3..82bf5a39aeb 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -1,5 +1,5 @@ # v9.1.18 -FROM squidfunk/mkdocs-material@sha256:9aad7af2f62950826f57928e984ea8aa77a561f67b7f5fc251ced67d52a2a5fe +FROM squidfunk/mkdocs-material@sha256:a4a2029fdf524f0c727852e492cd2bbae30cc9471959da60d7dc46bf565a521b # pip-compile --generate-hashes --output-file=requirements.txt requirements.in COPY requirements.txt /tmp/ RUN pip install --require-hashes -r /tmp/requirements.txt From ee78db0b4a8bc3662a1d9079906a5272b436ec19 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 23:14:11 +0000 Subject: [PATCH 19/32] chore(deps-dev): bump aws-cdk from 2.123.0 to 2.124.0 (#3678) Bumps [aws-cdk](https://github.com/aws/aws-cdk/tree/HEAD/packages/aws-cdk) from 2.123.0 to 2.124.0. - [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/v2.124.0/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/commits/v2.124.0/packages/aws-cdk) --- updated-dependencies: - dependency-name: aws-cdk dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 24659a4e69b..cdfbdeceff1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "package-lock.json": "^1.0.0" }, "devDependencies": { - "aws-cdk": "^2.123.0" + "aws-cdk": "^2.124.0" } }, "node_modules/aws-cdk": { - "version": "2.123.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.123.0.tgz", - "integrity": "sha512-JvGNN1FobSaGwirJJQZ1oIkaHFfQoLbRyuxzFNQSs2wlVltwFb1VdR7FNxh0sVzugM2RsYQu8xQPUa53ZnDlyg==", + "version": "2.124.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.124.0.tgz", + "integrity": "sha512-kUOfqwIAaTEx4ZozojZEhWa8G+O9KU+P0tERtDVmTw9ip4QXNMwTTkjj/IPtoH8qfXGdeibTQ9MJwRvHOR8kXQ==", "dev": true, "bin": { "cdk": "bin/cdk" diff --git a/package.json b/package.json index cfa167558d4..bf23efc23e3 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.123.0" + "aws-cdk": "^2.124.0" }, "dependencies": { "package-lock.json": "^1.0.0" From fe70d9132a5ea63f873a1172c5cb5e29bce9faa5 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 08:37:45 +0000 Subject: [PATCH 20/32] chore(ci): changelog rebuild (#3680) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9342503c3ca..9eacdda2d41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ # Unreleased +## Bug Fixes + +* **event-handler:** strip whitespace from Content-Type headers during OpenAPI schema validation 
([#3677](https://github.com/aws-powertools/powertools-lambda-python/issues/3677)) + ## Documentation * **metrics:** fix empty metric warning filter ([#3660](https://github.com/aws-powertools/powertools-lambda-python/issues/3660)) @@ -15,11 +19,13 @@ ## Maintenance * **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) -* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) -* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) * **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) +* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) +* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps:** bump squidfunk/mkdocs-material from `9aad7af` to `a4a2029` in /docs ([#3679](https://github.com/aws-powertools/powertools-lambda-python/issues/3679)) * **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) * **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) +* **deps-dev:** bump aws-cdk from 2.123.0 to 2.124.0 ([#3678](https://github.com/aws-powertools/powertools-lambda-python/issues/3678)) From fd4903fc95be73eed38281a9b23173067732fa42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 20:44:47 +0000 Subject: [PATCH 21/32] chore(deps): bump codecov/codecov-action from 3.1.5 to 3.1.6 (#3683) --- 
.github/workflows/quality_check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml index db54c571509..40ccbe99887 100644 --- a/.github/workflows/quality_check.yml +++ b/.github/workflows/quality_check.yml @@ -71,7 +71,7 @@ jobs: - name: Complexity baseline run: make complexity-baseline - name: Upload coverage to Codecov - uses: codecov/codecov-action@4fe8c5f003fae66aa5ebb77cfd3e7bfbbda0b6b0 # 3.1.5 + uses: codecov/codecov-action@ab904c41d6ece82784817410c45d8b8c02684457 # 3.1.6 with: file: ./coverage.xml env_vars: PYTHON From 6a06a6693428c2e6323712be918f0f78f00af97c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 22:08:09 +0000 Subject: [PATCH 22/32] chore(deps-dev): bump sentry-sdk from 1.39.2 to 1.40.0 (#3684) Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.39.2 to 1.40.0. - [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.39.2...1.40.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index ec9ea847df8..8e41d5f459a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2859,13 +2859,13 @@ pbr = "*" [[package]] name = "sentry-sdk" -version = "1.39.2" +version = "1.40.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.39.2.tar.gz", hash = "sha256:24c83b0b41c887d33328a9166f5950dc37ad58f01c9f2fbff6b87a6f1094170c"}, - {file = "sentry_sdk-1.39.2-py2.py3-none-any.whl", hash = "sha256:acaf597b30258fc7663063b291aa99e58f3096e91fe1e6634f4b79f9c1943e8e"}, + {file = "sentry-sdk-1.40.0.tar.gz", hash = "sha256:34ad8cfc9b877aaa2a8eb86bfe5296a467fffe0619b931a05b181c45f6da59bf"}, + {file = "sentry_sdk-1.40.0-py2.py3-none-any.whl", hash = "sha256:78575620331186d32f34b7ece6edea97ce751f58df822547d3ab85517881a27a"}, ] [package.dependencies] @@ -2891,7 +2891,7 @@ huey = ["huey (>=2)"] loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure_eval"] +pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] From 7d88b5477d74237c3337cb7e1f1e561e765298b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 22:15:57 +0000 Subject: [PATCH 23/32] chore(deps-dev): bump ruff from 0.1.14 to 0.1.15 (#3685) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.1.14 to 0.1.15. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.1.14...v0.1.15) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8e41d5f459a..b6bf62d37bb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2801,28 +2801,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.1.14" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = 
"ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = 
"ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -3402,4 +3402,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "10841362fd6d21eed52d656803e056dcba30a7835c5a00dda8275eaa4e1cc41c" +content-hash = "f4c66a8fa656902aba0c04cc8b5dc236d7f0ed6f7c3e22507cc89e711b0b62b2" diff --git a/pyproject.toml b/pyproject.toml index 7f04dba78a4..0e576d412df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,7 +109,7 @@ mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" sentry-sdk = "^1.22.2" -ruff = ">=0.0.272,<0.1.15" +ruff = ">=0.0.272,<0.1.16" retry2 = "^0.9.5" pytest-socket = "^0.6.0" types-redis = "^4.6.0.7" From b591bb8c85db7c36f088a9db3ef1b6e1788bab39 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" 
<41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 09:05:12 +0000 Subject: [PATCH 24/32] chore(ci): changelog rebuild (#3686) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9eacdda2d41..479bde82545 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,14 +18,17 @@ ## Maintenance -* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) * **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) -* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) * **deps:** bump squidfunk/mkdocs-material from `9aad7af` to `a4a2029` in /docs ([#3679](https://github.com/aws-powertools/powertools-lambda-python/issues/3679)) +* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps:** bump codecov/codecov-action from 3.1.5 to 3.1.6 ([#3683](https://github.com/aws-powertools/powertools-lambda-python/issues/3683)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) +* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) * **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) * **deps-dev:** bump ruff from 0.1.13 to 
0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) * **deps-dev:** bump aws-cdk from 2.123.0 to 2.124.0 ([#3678](https://github.com/aws-powertools/powertools-lambda-python/issues/3678)) +* **deps-dev:** bump sentry-sdk from 1.39.2 to 1.40.0 ([#3684](https://github.com/aws-powertools/powertools-lambda-python/issues/3684)) +* **deps-dev:** bump ruff from 0.1.14 to 0.1.15 ([#3685](https://github.com/aws-powertools/powertools-lambda-python/issues/3685)) From e1a4d1ec0b422a5ba16249aedde14543c2f3ec2f Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 31 Jan 2024 09:40:39 +0000 Subject: [PATCH 25/32] docs(proccess): add versioning and maintenance policy (#3682) * Adding versioning policy * docs: update wording to reflect Powertools for AWS Lambda * docs: adjust timelines, refer to tight coupling w/ Lambda runtime policy * docs: fix grammar * Adressing Andrea's feedback * docs: GA minimal commitment adjustment to allow each language to set their baseline * docs: final line editing --------- Co-authored-by: heitorlessa --- docs/versioning.md | 63 ++++++++++++++++++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 64 insertions(+) create mode 100644 docs/versioning.md diff --git a/docs/versioning.md b/docs/versioning.md new file mode 100644 index 00000000000..44349f4bfc2 --- /dev/null +++ b/docs/versioning.md @@ -0,0 +1,63 @@ +--- +title: Versioning and maintenance policy +description: Versioning and maintenance policy for Powertools for AWS Lambda (Python) +--- + + + +### Overview + +This document outlines the maintenance policy for Powertools for AWS Lambda and their underlying dependencies. AWS regularly provides Powertools for AWS Lambda with updates that may contain new features, enhancements, bug fixes, security patches, or documentation updates. Updates may also address changes with dependencies, language runtimes, and operating systems. Powertools for AWS Lambda is published to package managers (e.g. 
PyPI, NPM, Maven, NuGet), and are available as source code on GitHub. + +We recommend users stay up-to-date with Powertools for AWS Lambda releases to keep up with the latest features, security updates, and underlying dependencies. Continued use of an unsupported Powertools for AWS Lambda version is not recommended and is done at the user’s discretion. + +!!! info "For brevity, we will interchangeably refer to Powertools for AWS Lambda as "SDK" _(Software Development Toolkit)_." + +### Versioning + +Powertools for AWS Lambda release versions are in the form of X.Y.Z where X represents the major version. Increasing the major version of an SDK indicates that this SDK underwent significant and substantial changes to support new idioms and patterns in the language. Major versions are introduced when public interfaces _(e.g. classes, methods, types, etc.)_, behaviors, or semantics have changed. Applications need to be updated in order for them to work with the newest SDK version. It is important to update major versions carefully and in accordance with the upgrade guidelines provided by AWS. + +### SDK major version lifecycle + +The lifecycle for major Powertools for AWS Lambda versions consists of 5 phases, which are outlined below. + +* **Developer Preview** (Phase 0) - During this phase, SDKs are not supported, should not be used in production environments, and are meant for early access and feedback purposes only. It is possible for future releases to introduce breaking changes. Once AWS identifies a release to be a stable product, it may mark it as a Release Candidate. Release Candidates are ready for GA release unless significant bugs emerge, and will receive full AWS support. +* **General Availability (GA)** (Phase 1) - During this phase, SDKs are fully supported. AWS will provide regular SDK releases that include support for new features, enhancements, as well as bug and security fixes. 
AWS will support the GA version of an SDK for _at least 24 months_, unless otherwise specified. +* **Maintenance Announcement** (Phase 2) - AWS will make a public announcement at least 6 months before an SDK enters maintenance mode. During this period, the SDK will continue to be fully supported. Typically, maintenance mode is announced at the same time as the next major version is transitioned to GA. +* **Maintenance** (Phase 3) - During the maintenance mode, AWS limits SDK releases to address critical bug fixes and security issues only. An SDK will not receive API updates for new or existing services, or be updated to support new regions. Maintenance mode has a _default duration of 6 months_, unless otherwise specified. +* **End-of-Support** (Phase 4) - When an SDK reaches end-of-support, it will no longer receive updates or releases. Previously published releases will continue to be available via public package managers and the code will remain on GitHub. The GitHub repository may be archived. Use of an SDK which has reached end-of-support is done at the user’s discretion. We recommend users upgrade to the new major version. + +!!! note "Please note that the timelines shown below are illustrative and not binding" + +![Maintenance policy timelines](https://docs.aws.amazon.com/images/sdkref/latest/guide/images/maint-policy.png) + +### Dependency lifecycle + +Most AWS SDKs have underlying dependencies, such as language runtimes, AWS Lambda runtime, or third party libraries and frameworks. These dependencies are typically tied to the language community or the vendor who owns that particular component. Each community or vendor publishes their own end-of-support schedule for their product. + +The following terms are used to classify underlying third party dependencies: + +* [**AWS Lambda Runtime**](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html): Examples include `nodejs20.x`, `python3.12`, etc. 
+* **Language Runtime**: Examples include Python 3.12, NodeJS 20, Java 17, .NET Core, etc. +* **Third party Library**: Examples include Pydantic, AWS X-Ray SDK, AWS Encryption SDK, Middy.js, etc. + +Powertools for AWS Lambda follows the [AWS Lambda Runtime deprecation policy cycle](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html#runtime-support-policy), when it comes to Language Runtime. This means we will stop supporting their respective deprecated Language Runtime _(e.g., `python3.7`)_ without increasing the major SDK version. + +!!! note "AWS reserves the right to stop support for an underlying dependency without increasing the major SDK version" + +### Communication methods + +Maintenance announcements are communicated in several ways: + +* A pinned GitHub Request For Comments (RFC) issue indicating the campaign for the next major version. The RFC will outline the path to end-of-support, specify campaign timelines, and upgrade guidance. +* AWS SDK documentation, such as API reference documentation, user guides, SDK product marketing pages, and GitHub readme(s) are updated to indicate the campaign timeline and provide guidance on upgrading affected applications. +* Deprecation warnings are added to the SDKs, outlining the path to end-of-support and linking to the upgrade guide. 
+ +To see the list of available major versions of Powertools for AWS Lambda and where they are in their maintenance lifecycle, see [version support matrix](#version-support-matrix) + +### Version support matrix + +| SDK | Major version | Current Phase | General Availability Date | Notes | +| -------------------------------- | ------------- | -------------------- | ------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Powertools for AWS Lambda Python | 2.x | General Availability | 10/24/2022 | See [Release Notes](https://github.com/aws-powertools/powertools-lambda-python/releases/tag/v2.0.0) | +| Powertools for AWS Lambda Python | 1.x | End of Support | 06/18/2020 | See [RFC](https://github.com/aws-powertools/powertools-lambda-python/issues/1459) and [upgrade guide](https://docs.powertools.aws.dev/lambda/python/latest/upgrade/) | diff --git a/mkdocs.yml b/mkdocs.yml index 0a844fd392f..a862430a054 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -38,6 +38,7 @@ nav: - Processes: - Security: security.md - Automation: automation.md + - Versioning policy: versioning.md - Roadmap: roadmap.md - Maintainers: maintainers.md - Contributing: From b7845980297db326ffc257b9f097c22fffefea85 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 08:55:35 +0000 Subject: [PATCH 26/32] chore(ci): changelog rebuild (#3690) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 479bde82545..b5024e61b44 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ ## Documentation * **metrics:** fix empty metric warning filter ([#3660](https://github.com/aws-powertools/powertools-lambda-python/issues/3660)) +* **proccess:** add versioning and 
maintenance policy ([#3682](https://github.com/aws-powertools/powertools-lambda-python/issues/3682)) ## Features @@ -19,16 +20,16 @@ ## Maintenance * **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) +* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) * **deps:** bump squidfunk/mkdocs-material from `9aad7af` to `a4a2029` in /docs ([#3679](https://github.com/aws-powertools/powertools-lambda-python/issues/3679)) -* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) -* **deps:** bump codecov/codecov-action from 3.1.5 to 3.1.6 ([#3683](https://github.com/aws-powertools/powertools-lambda-python/issues/3683)) * **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) -* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) -* **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) -* **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) +* **deps:** bump codecov/codecov-action from 3.1.5 to 3.1.6 ([#3683](https://github.com/aws-powertools/powertools-lambda-python/issues/3683)) +* **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) * **deps-dev:** bump aws-cdk from 2.123.0 to 2.124.0 ([#3678](https://github.com/aws-powertools/powertools-lambda-python/issues/3678)) * **deps-dev:** bump sentry-sdk from 1.39.2 to 1.40.0 
([#3684](https://github.com/aws-powertools/powertools-lambda-python/issues/3684)) +* **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) * **deps-dev:** bump ruff from 0.1.14 to 0.1.15 ([#3685](https://github.com/aws-powertools/powertools-lambda-python/issues/3685)) +* **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) From ced0a3d620714c9dd0188f09718c5bf88f00c2e1 Mon Sep 17 00:00:00 2001 From: seshubaws <116689586+seshubaws@users.noreply.github.com> Date: Thu, 1 Feb 2024 03:56:19 -0800 Subject: [PATCH 27/32] docs(data-masking): add docs for data masking utility (#3186) --- Makefile | 4 +- README.md | 1 + aws_lambda_powertools/shared/functions.py | 12 +- .../utilities/_data_masking/base.py | 174 ----- .../utilities/_data_masking/constants.py | 5 - .../_data_masking/provider/__init__.py | 5 - .../utilities/_data_masking/provider/base.py | 34 - .../_data_masking/provider/kms/__init__.py | 5 - .../provider/kms/aws_encryption_sdk.py | 177 ----- .../__init__.py | 2 +- .../utilities/data_masking/base.py | 291 ++++++++ .../utilities/data_masking/constants.py | 14 + .../utilities/data_masking/exceptions.py | 34 + .../data_masking/provider/__init__.py | 5 + .../utilities/data_masking/provider/base.py | 81 +++ .../data_masking/provider/kms/__init__.py | 5 + .../provider/kms/aws_encryption_sdk.py | 247 +++++++ docs/index.md | 1 + docs/utilities/data_masking.md | 638 ++++++++++++++++++ examples/data_masking/sam/template.yaml | 67 ++ .../src/advanced_custom_serializer.py | 26 + .../src/aws_encryption_provider_example.py | 34 + .../src/changing_default_algorithm.py | 33 + .../src/choosing_payload_all_nested_keys.json | 19 + ...oosing_payload_all_nested_keys_output.json | 9 + .../choosing_payload_complex_nested_keys.json | 11 + ...ng_payload_complex_nested_keys_output.json | 11 + .../src/choosing_payload_complex_search.json 
| 19 + ...hoosing_payload_complex_search_output.json | 19 + .../src/choosing_payload_list_all_index.json | 15 + ...hoosing_payload_list_all_index_output.json | 16 + .../src/choosing_payload_list_index.json | 15 + .../choosing_payload_list_index_output.json | 16 + .../src/choosing_payload_list_slice.json | 19 + .../choosing_payload_list_slice_output.json | 19 + .../src/choosing_payload_multiple_keys.json | 9 + ...choosing_payload_multiple_keys_output.json | 9 + .../src/choosing_payload_nested_key.json | 8 + .../choosing_payload_nested_key_output.json | 8 + .../src/choosing_payload_simple_json.json | 1 + .../choosing_payload_simple_json_output.json | 8 + .../src/choosing_payload_top_keys.json | 5 + .../src/choosing_payload_top_keys_output.json | 5 + .../src/data_masking_function_example.py | 26 + .../data_masking_function_example_output.json | 34 + .../data_masking/src/encrypt_data_output.json | 3 + .../data_masking/src/generic_data_input.json | 21 + .../src/getting_started_decrypt_data.py | 26 + .../getting_started_decrypt_data_input.json | 3 + .../getting_started_decrypt_data_output.json | 18 + .../src/getting_started_decryption_context.py | 31 + .../src/getting_started_encrypt_data.py | 28 + .../src/getting_started_encryption_context.py | 31 + .../src/getting_started_erase_data.py | 19 + .../getting_started_erase_data_output.json | 13 + .../data_masking/src/large_data_input.json | 32 + .../data_masking/src/using_multiple_keys.py | 29 + examples/data_masking/tests/lambda_mask.py | 14 + .../data_masking/tests/test_lambda_mask.py | 30 + mkdocs.yml | 1 + mypy.ini | 8 +- poetry.lock | 410 +++++------ pyproject.toml | 5 +- .../data_masking/handlers/basic_handler.py | 6 +- .../e2e/data_masking/test_e2e_data_masking.py | 18 +- .../data_masking/test_aws_encryption_sdk.py | 301 +++++++-- .../pt-load-test-stack/function_1024/app.py | 6 +- .../pt-load-test-stack/function_128/app.py | 6 +- .../pt-load-test-stack/function_1769/app.py | 6 +- .../pt-load-test-stack/template.yaml 
| 12 +- .../data_masking/test_perf_data_masking.py | 12 +- tests/unit/data_masking/test_kms_provider.py | 42 ++ .../data_masking/test_unit_data_masking.py | 132 ++-- 73 files changed, 2709 insertions(+), 750 deletions(-) delete mode 100644 aws_lambda_powertools/utilities/_data_masking/base.py delete mode 100644 aws_lambda_powertools/utilities/_data_masking/constants.py delete mode 100644 aws_lambda_powertools/utilities/_data_masking/provider/__init__.py delete mode 100644 aws_lambda_powertools/utilities/_data_masking/provider/base.py delete mode 100644 aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py delete mode 100644 aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py rename aws_lambda_powertools/utilities/{_data_masking => data_masking}/__init__.py (81%) create mode 100644 aws_lambda_powertools/utilities/data_masking/base.py create mode 100644 aws_lambda_powertools/utilities/data_masking/constants.py create mode 100644 aws_lambda_powertools/utilities/data_masking/exceptions.py create mode 100644 aws_lambda_powertools/utilities/data_masking/provider/__init__.py create mode 100644 aws_lambda_powertools/utilities/data_masking/provider/base.py create mode 100644 aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py create mode 100644 aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py create mode 100644 docs/utilities/data_masking.md create mode 100644 examples/data_masking/sam/template.yaml create mode 100644 examples/data_masking/src/advanced_custom_serializer.py create mode 100644 examples/data_masking/src/aws_encryption_provider_example.py create mode 100644 examples/data_masking/src/changing_default_algorithm.py create mode 100644 examples/data_masking/src/choosing_payload_all_nested_keys.json create mode 100644 examples/data_masking/src/choosing_payload_all_nested_keys_output.json create mode 100644 examples/data_masking/src/choosing_payload_complex_nested_keys.json 
create mode 100644 examples/data_masking/src/choosing_payload_complex_nested_keys_output.json create mode 100644 examples/data_masking/src/choosing_payload_complex_search.json create mode 100644 examples/data_masking/src/choosing_payload_complex_search_output.json create mode 100644 examples/data_masking/src/choosing_payload_list_all_index.json create mode 100644 examples/data_masking/src/choosing_payload_list_all_index_output.json create mode 100644 examples/data_masking/src/choosing_payload_list_index.json create mode 100644 examples/data_masking/src/choosing_payload_list_index_output.json create mode 100644 examples/data_masking/src/choosing_payload_list_slice.json create mode 100644 examples/data_masking/src/choosing_payload_list_slice_output.json create mode 100644 examples/data_masking/src/choosing_payload_multiple_keys.json create mode 100644 examples/data_masking/src/choosing_payload_multiple_keys_output.json create mode 100644 examples/data_masking/src/choosing_payload_nested_key.json create mode 100644 examples/data_masking/src/choosing_payload_nested_key_output.json create mode 100644 examples/data_masking/src/choosing_payload_simple_json.json create mode 100644 examples/data_masking/src/choosing_payload_simple_json_output.json create mode 100644 examples/data_masking/src/choosing_payload_top_keys.json create mode 100644 examples/data_masking/src/choosing_payload_top_keys_output.json create mode 100644 examples/data_masking/src/data_masking_function_example.py create mode 100644 examples/data_masking/src/data_masking_function_example_output.json create mode 100644 examples/data_masking/src/encrypt_data_output.json create mode 100644 examples/data_masking/src/generic_data_input.json create mode 100644 examples/data_masking/src/getting_started_decrypt_data.py create mode 100644 examples/data_masking/src/getting_started_decrypt_data_input.json create mode 100644 examples/data_masking/src/getting_started_decrypt_data_output.json create mode 100644 
examples/data_masking/src/getting_started_decryption_context.py create mode 100644 examples/data_masking/src/getting_started_encrypt_data.py create mode 100644 examples/data_masking/src/getting_started_encryption_context.py create mode 100644 examples/data_masking/src/getting_started_erase_data.py create mode 100644 examples/data_masking/src/getting_started_erase_data_output.json create mode 100644 examples/data_masking/src/large_data_input.json create mode 100644 examples/data_masking/src/using_multiple_keys.py create mode 100644 examples/data_masking/tests/lambda_mask.py create mode 100644 examples/data_masking/tests/test_lambda_mask.py create mode 100644 tests/unit/data_masking/test_kms_provider.py diff --git a/Makefile b/Makefile index 80c89f72961..7fa170b28c6 100644 --- a/Makefile +++ b/Makefile @@ -8,13 +8,13 @@ dev: pip install --upgrade pip pre-commit poetry poetry config --local virtualenvs.in-project true @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk redis" + poetry install --extras "all redis" pre-commit install dev-gitpod: pip install --upgrade pip poetry @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk redis" + poetry install --extras "all redis" pre-commit install format: diff --git a/README.md b/README.md index d230c31906e..d3f0ec30603 100644 --- a/README.md +++ b/README.md @@ -30,6 +30,7 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles * **[Event source data classes](https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/)** - Data classes describing the schema of common Lambda event triggers * **[Parser](https://docs.powertools.aws.dev/lambda/python/latest/utilities/parser/)** - Data parsing and deep validation using Pydantic * **[Idempotency](https://docs.powertools.aws.dev/lambda/python/latest/utilities/idempotency/)** - Convert your Lambda functions into idempotent operations which are safe to retry +* **[Data 
Masking](https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_masking/)** - Protect confidential data with easy removal or encryption * **[Feature Flags](https://docs.powertools.aws.dev/lambda/python/latest/utilities/feature_flags/)** - A simple rule engine to evaluate when one or multiple features should be enabled depending on the input * **[Streaming](https://docs.powertools.aws.dev/lambda/python/latest/utilities/streaming/)** - Streams datasets larger than the available memory as streaming data. diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index c427f0d720f..9765f55c025 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -96,10 +96,18 @@ def resolve_env_var_choice( def base64_decode(value: str) -> bytes: try: - logger.debug("Decoding base64 record item before parsing") + logger.debug("Decoding base64 item to bytes") return base64.b64decode(value) except (BinAsciiError, TypeError): - raise ValueError("base64 decode failed") + raise ValueError("base64 decode failed - is this base64 encoded string?") + + +def bytes_to_base64_string(value: bytes) -> str: + try: + logger.debug("Encoding bytes to base64 string") + return base64.b64encode(value).decode() + except TypeError: + raise ValueError(f"base64 encoding failed - is this bytes data? type: {type(value)}") def bytes_to_string(value: bytes) -> str: diff --git a/aws_lambda_powertools/utilities/_data_masking/base.py b/aws_lambda_powertools/utilities/_data_masking/base.py deleted file mode 100644 index 211e44c3759..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/base.py +++ /dev/null @@ -1,174 +0,0 @@ -import json -from typing import Optional, Union - -from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider - - -class DataMasking: - """ - Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations. 
- Please DON'T USE THIS utility in production environments. - Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. - - A utility class for masking sensitive data within various data types. - - This class provides methods for masking sensitive information, such as personal - identifiers or confidential data, within different data types such as strings, - dictionaries, lists, and more. It helps protect sensitive information while - preserving the structure of the original data. - - Usage: - Instantiate an object of this class and use its methods to mask sensitive data - based on the data type. Supported data types include strings, dictionaries, - and more. - - Example: - ``` - from aws_lambda_powertools.utilities.data_masking.base import DataMasking - - def lambda_handler(event, context): - masker = DataMasking() - - data = { - "project": "powertools", - "sensitive": "xxxxxxxxxx" - } - - masked = masker.mask(data,fields=["sensitive"]) - - return masked - - ``` - """ - - def __init__(self, provider: Optional[BaseProvider] = None): - self.provider = provider or BaseProvider() - - def encrypt(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.encrypt, **provider_options) - - def decrypt(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.decrypt, **provider_options) - - def mask(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.mask, **provider_options) - - def _apply_action(self, data, fields, action, **provider_options): - """ - Helper method to determine whether to apply a given action to the entire input data - or to specific fields if the 'fields' argument is specified. - - Parameters - ---------- - data : any - The input data to process. - fields : Optional[List[any]] = None - A list of fields to apply the action to. 
If 'None', the action is applied to the entire 'data'. - action : Callable - The action to apply to the data. It should be a callable that performs an operation on the data - and returns the modified value. - - Returns - ------- - any - The modified data after applying the action. - """ - - if fields is not None: - return self._apply_action_to_fields(data, fields, action, **provider_options) - else: - return action(data, **provider_options) - - def _apply_action_to_fields( - self, - data: Union[dict, str], - fields: list, - action, - **provider_options, - ) -> Union[dict, str]: - """ - This method takes the input data, which can be either a dictionary or a JSON string, - and applies a mask, an encryption, or a decryption to the specified fields. - - Parameters - ---------- - data : Union[dict, str]) - The input data to process. It can be either a dictionary or a JSON string. - fields : List - A list of fields to apply the action to. Each field can be specified as a string or - a list of strings representing nested keys in the dictionary. - action : Callable - The action to apply to the fields. It should be a callable that takes the current - value of the field as the first argument and any additional arguments that might be required - for the action. It performs an operation on the current value using the provided arguments and - returns the modified value. - **provider_options: - Additional keyword arguments to pass to the 'action' function. - - Returns - ------- - dict - The modified dictionary after applying the action to the - specified fields. - - Raises - ------- - ValueError - If 'fields' parameter is None. 
- TypeError - If the 'data' parameter is not a traversable type - - Example - ------- - ```python - >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}} - >>> fields = ['a.b.c', 'a.x.y'] - # The function will transform the value at 'a.b.c' (1) and 'a.x.y' (2) - # and store the result as: - new_dict = {'a': {'b': {'c': 'transformed_value'}}, 'x': {'y': 'transformed_value'}} - ``` - """ - - if fields is None: - raise ValueError("No fields specified.") - - if isinstance(data, str): - # Parse JSON string as dictionary - my_dict_parsed = json.loads(data) - elif isinstance(data, dict): - # In case their data has keys that are not strings (i.e. ints), convert it all into a JSON string - my_dict_parsed = json.dumps(data) - # Turn back into dict so can parse it - my_dict_parsed = json.loads(my_dict_parsed) - else: - raise TypeError( - f"Unsupported data type for 'data' parameter. Expected a traversable type, but got {type(data)}.", - ) - - # For example: ['a.b.c'] in ['a.b.c', 'a.x.y'] - for nested_key in fields: - # Prevent overriding loop variable - curr_nested_key = nested_key - - # If the nested_key is not a string, convert it to a string representation - if not isinstance(curr_nested_key, str): - curr_nested_key = json.dumps(curr_nested_key) - - # Split the nested key string into a list of nested keys - # ['a.b.c'] -> ['a', 'b', 'c'] - keys = curr_nested_key.split(".") - - # Initialize a current dictionary to the root dictionary - curr_dict = my_dict_parsed - - # Traverse the dictionary hierarchy by iterating through the list of nested keys - for key in keys[:-1]: - curr_dict = curr_dict[key] - - # Retrieve the final value of the nested field - valtochange = curr_dict[(keys[-1])] - - # Apply the specified 'action' to the target value - curr_dict[keys[-1]] = action(valtochange, **provider_options) - - return my_dict_parsed diff --git a/aws_lambda_powertools/utilities/_data_masking/constants.py b/aws_lambda_powertools/utilities/_data_masking/constants.py deleted file mode 
100644 index 47e74f472cf..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/constants.py +++ /dev/null @@ -1,5 +0,0 @@ -DATA_MASKING_STRING: str = "*****" -CACHE_CAPACITY: int = 100 -MAX_CACHE_AGE_SECONDS: float = 300.0 -MAX_MESSAGES_ENCRYPTED: int = 200 -# NOTE: You can also set max messages/bytes per data key diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py deleted file mode 100644 index 7ee07f964b1..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from aws_lambda_powertools.utilities._data_masking.provider.base import BaseProvider - -__all__ = [ - "BaseProvider", -] diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/base.py b/aws_lambda_powertools/utilities/_data_masking/provider/base.py deleted file mode 100644 index a293c6aff9a..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/base.py +++ /dev/null @@ -1,34 +0,0 @@ -import json -from typing import Any - -from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING - - -class BaseProvider: - """ - When you try to create an instance of a subclass that does not implement the encrypt method, - you will get a NotImplementedError with a message that says the method is not implemented: - """ - - def __init__(self, json_serializer=None, json_deserializer=None) -> None: - self.json_serializer = json_serializer or self.default_json_serializer - self.json_deserializer = json_deserializer or self.default_json_deserializer - - def default_json_serializer(self, data): - return json.dumps(data).encode("utf-8") - - def default_json_deserializer(self, data): - return json.loads(data.decode("utf-8")) - - def encrypt(self, data) -> str: - raise NotImplementedError("Subclasses must implement encrypt()") - - def decrypt(self, data) -> Any: - raise NotImplementedError("Subclasses must 
implement decrypt()") - - def mask(self, data) -> Any: - if isinstance(data, (str, dict, bytes)): - return DATA_MASKING_STRING - elif isinstance(data, (list, tuple, set)): - return type(data)([DATA_MASKING_STRING] * len(data)) - return DATA_MASKING_STRING diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py deleted file mode 100644 index f257339d634..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider - -__all__ = [ - "AwsEncryptionSdkProvider", -] diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py deleted file mode 100644 index a895f8de0ac..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py +++ /dev/null @@ -1,177 +0,0 @@ -from __future__ import annotations - -import base64 -from typing import Any, Callable, Dict, List - -import botocore -from aws_encryption_sdk import ( - CachingCryptoMaterialsManager, - EncryptionSDKClient, - LocalCryptoMaterialsCache, - StrictAwsKmsMasterKeyProvider, -) - -from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session -from aws_lambda_powertools.utilities._data_masking.constants import ( - CACHE_CAPACITY, - MAX_CACHE_AGE_SECONDS, - MAX_MESSAGES_ENCRYPTED, -) -from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider - - -class ContextMismatchError(Exception): - def __init__(self, key): - super().__init__(f"Encryption Context does not match expected value for key: {key}") - self.key = key - - -class AwsEncryptionSdkProvider(BaseProvider): - """ - The AwsEncryptionSdkProvider is used as a provider for the DataMasking class. 
- - This provider allows you to perform data masking using the AWS Encryption SDK - for encryption and decryption. It integrates with the DataMasking class to - securely encrypt and decrypt sensitive data. - - Usage Example: - ``` - from aws_lambda_powertools.utilities.data_masking import DataMasking - from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import ( - AwsEncryptionSdkProvider, - ) - - - def lambda_handler(event, context): - provider = AwsEncryptionSdkProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"]) - masker = DataMasking(provider=provider) - - data = { - "project": "powertools", - "sensitive": "xxxxxxxxxx" - } - - masked = masker.encrypt(data,fields=["sensitive"]) - - return masked - - ``` - """ - - def __init__( - self, - keys: List[str], - key_provider=None, - local_cache_capacity: int = CACHE_CAPACITY, - max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, - max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, - json_serializer: Callable | None = None, - json_deserializer: Callable | None = None, - ): - super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer) - - self._key_provider = key_provider or KMSKeyProvider( - keys=keys, - local_cache_capacity=local_cache_capacity, - max_cache_age_seconds=max_cache_age_seconds, - max_messages_encrypted=max_messages_encrypted, - json_serializer=self.json_serializer, - json_deserializer=self.json_deserializer, - ) - - def encrypt(self, data: bytes | str | Dict | int, **provider_options) -> str: - return self._key_provider.encrypt(data=data, **provider_options) - - def decrypt(self, data: str, **provider_options) -> Any: - return self._key_provider.decrypt(data=data, **provider_options) - - -class KMSKeyProvider: - - """ - The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS) - client, a caching mechanism, and a keyring for secure key management and data encryption. 
- """ - - def __init__( - self, - keys: List[str], - json_serializer: Callable, - json_deserializer: Callable, - local_cache_capacity: int = CACHE_CAPACITY, - max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, - max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, - ): - session = botocore.session.Session() - register_feature_to_botocore_session(session, "data-masking") - - self.json_serializer = json_serializer - self.json_deserializer = json_deserializer - self.client = EncryptionSDKClient() - self.keys = keys - self.cache = LocalCryptoMaterialsCache(local_cache_capacity) - self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) - self.cache_cmm = CachingCryptoMaterialsManager( - master_key_provider=self.key_provider, - cache=self.cache, - max_age=max_cache_age_seconds, - max_messages_encrypted=max_messages_encrypted, - ) - - def encrypt(self, data: bytes | str | Dict | float, **provider_options) -> str: - """ - Encrypt data using the AwsEncryptionSdkProvider. - - Parameters - ------- - data : Union[bytes, str] - The data to be encrypted. - provider_options - Additional options for the aws_encryption_sdk.EncryptionSDKClient - - Returns - ------- - ciphertext : str - The encrypted data, as a base64-encoded string. - """ - data_encoded = self.json_serializer(data) - ciphertext, _ = self.client.encrypt( - source=data_encoded, - materials_manager=self.cache_cmm, - **provider_options, - ) - ciphertext = base64.b64encode(ciphertext).decode() - return ciphertext - - def decrypt(self, data: str, **provider_options) -> Any: - """ - Decrypt data using AwsEncryptionSdkProvider. 
- - Parameters - ------- - data : Union[bytes, str] - The encrypted data, as a base64-encoded string - provider_options - Additional options for the aws_encryption_sdk.EncryptionSDKClient - - Returns - ------- - ciphertext : bytes - The decrypted data in bytes - """ - ciphertext_decoded = base64.b64decode(data) - - expected_context = provider_options.pop("encryption_context", {}) - - ciphertext, decryptor_header = self.client.decrypt( - source=ciphertext_decoded, - key_provider=self.key_provider, - **provider_options, - ) - - for key, value in expected_context.items(): - if decryptor_header.encryption_context.get(key) != value: - raise ContextMismatchError(key) - - ciphertext = self.json_deserializer(ciphertext) - return ciphertext diff --git a/aws_lambda_powertools/utilities/_data_masking/__init__.py b/aws_lambda_powertools/utilities/data_masking/__init__.py similarity index 81% rename from aws_lambda_powertools/utilities/_data_masking/__init__.py rename to aws_lambda_powertools/utilities/data_masking/__init__.py index 806c856ba75..4d767e83ce1 100644 --- a/aws_lambda_powertools/utilities/_data_masking/__init__.py +++ b/aws_lambda_powertools/utilities/data_masking/__init__.py @@ -4,7 +4,7 @@ Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. 
""" -from aws_lambda_powertools.utilities._data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.base import DataMasking __all__ = [ "DataMasking", diff --git a/aws_lambda_powertools/utilities/data_masking/base.py b/aws_lambda_powertools/utilities/data_masking/base.py new file mode 100644 index 00000000000..c2557dcef24 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/base.py @@ -0,0 +1,291 @@ +from __future__ import annotations + +import functools +import logging +import warnings +from numbers import Number +from typing import Any, Callable, Mapping, Optional, Sequence, Union, overload + +from jsonpath_ng.ext import parse + +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingFieldNotFoundError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider + +logger = logging.getLogger(__name__) + + +class DataMasking: + """ + Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations. + Please DON'T USE THIS utility in production environments. + Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. + + The DataMasking class orchestrates erasing, encrypting, and decrypting + for the base provider. 
+ + Example: + ``` + from aws_lambda_powertools.utilities.data_masking.base import DataMasking + + def lambda_handler(event, context): + masker = DataMasking() + + data = { + "project": "powertools", + "sensitive": "password" + } + + erased = masker.erase(data,fields=["sensitive"]) + + return erased + + ``` + """ + + def __init__( + self, + provider: Optional[BaseProvider] = None, + raise_on_missing_field: bool = True, + ): + self.provider = provider or BaseProvider() + # NOTE: we depend on Provider to not confuse customers in passing the same 2 serializers in 2 places + self.json_serializer = self.provider.json_serializer + self.json_deserializer = self.provider.json_deserializer + self.raise_on_missing_field = raise_on_missing_field + + def encrypt( + self, + data: dict | Mapping | Sequence | Number, + provider_options: dict | None = None, + **encryption_context: str, + ) -> str: + return self._apply_action( + data=data, + fields=None, + action=self.provider.encrypt, + provider_options=provider_options or {}, + **encryption_context, + ) + + def decrypt( + self, + data, + provider_options: dict | None = None, + **encryption_context: str, + ) -> Any: + return self._apply_action( + data=data, + fields=None, + action=self.provider.decrypt, + provider_options=provider_options or {}, + **encryption_context, + ) + + @overload + def erase(self, data, fields: None) -> str: + ... + + @overload + def erase(self, data: list, fields: list[str]) -> list[str]: + ... + + @overload + def erase(self, data: tuple, fields: list[str]) -> tuple[str]: + ... + + @overload + def erase(self, data: dict, fields: list[str]) -> dict: + ... 
+ + def erase(self, data: Sequence | Mapping, fields: list[str] | None = None) -> str | list[str] | tuple[str] | dict: + return self._apply_action(data=data, fields=fields, action=self.provider.erase) + + def _apply_action( + self, + data, + fields: list[str] | None, + action: Callable, + provider_options: dict | None = None, + **encryption_context: str, + ): + """ + Helper method to determine whether to apply a given action to the entire input data + or to specific fields if the 'fields' argument is specified. + + Parameters + ---------- + data : str | dict + The input data to process. + fields : Optional[List[str]] + A list of fields to apply the action to. If 'None', the action is applied to the entire 'data'. + action : Callable + The action to apply to the data. It should be a callable that performs an operation on the data + and returns the modified value. + provider_options : dict + Provider specific keyword arguments to propagate; used as an escape hatch. + encryption_context: str + Encryption context to use in encrypt and decrypt operations. + + Returns + ------- + any + The modified data after applying the action. + """ + + if fields is not None: + logger.debug(f"Running action {action.__name__} with fields {fields}") + return self._apply_action_to_fields( + data=data, + fields=fields, + action=action, + provider_options=provider_options, + **encryption_context, + ) + else: + logger.debug(f"Running action {action.__name__} with the entire data") + return action(data=data, provider_options=provider_options, **encryption_context) + + def _apply_action_to_fields( + self, + data: Union[dict, str], + fields: list, + action: Callable, + provider_options: dict | None = None, + **encryption_context: str, + ) -> Union[dict, str]: + """ + This method takes the input data, which can be either a dictionary or a JSON string, + and erases, encrypts, or decrypts the specified fields. + + Parameters + ---------- + data : Union[dict, str]) + The input data to process. 
It can be either a dictionary or a JSON string. + fields : List + A list of fields to apply the action to. Each field can be specified as a string or + a list of strings representing nested keys in the dictionary. + action : Callable + The action to apply to the fields. It should be a callable that takes the current + value of the field as the first argument and any additional arguments that might be required + for the action. It performs an operation on the current value using the provided arguments and + returns the modified value. + provider_options : dict + Optional dictionary representing additional options for the action. + **encryption_context: str + Additional keyword arguments collected into a dictionary. + + Returns + ------- + dict | str + The modified dictionary or string after applying the action to the + specified fields. + + Raises + ------- + ValueError + If 'fields' parameter is None. + TypeError + If the 'data' parameter is not a traversable type + + Example + ------- + ```python + >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}} + >>> fields = ['a.b.c', 'a.x.y'] + # The function will transform the value at 'a.b.c' (1) and 'a.x.y' (2) + # and store the result as: + new_dict = {'a': {'b': {'c': '*****'}}, 'x': {'y': '*****'}} + ``` + """ + + data_parsed: dict = self._normalize_data_to_parse(fields, data) + + # For in-place updates, json_parse accepts a callback function + # this function must receive 3 args: field_value, fields, field_name + # We create a partial callback to pre-populate known options (action, provider opts, enc ctx) + update_callback = functools.partial( + self._call_action, + action=action, + provider_options=provider_options, + **encryption_context, + ) + + # Iterate over each field to be parsed. + for field_parse in fields: + # Parse the field expression using a 'parse' function. + json_parse = parse(field_parse) + # Find the corresponding keys in the normalized data using the parsed expression. 
+ result_parse = json_parse.find(data_parsed) + + if not result_parse: + if self.raise_on_missing_field: + # If the data for the field is not found, raise an exception. + raise DataMaskingFieldNotFoundError(f"Field or expression {field_parse} not found in {data_parsed}") + else: + # If the data for the field is not found, warning. + warnings.warn(f"Field or expression {field_parse} not found in {data_parsed}", stacklevel=2) + + # For in-place updates, json_parse accepts a callback function + # that receives 3 args: field_value, fields, field_name + # We create a partial callback to pre-populate known provider options (action, provider opts, enc ctx) + update_callback = functools.partial( + self._call_action, + action=action, + provider_options=provider_options, + **encryption_context, + ) + + json_parse.update( + data_parsed, + lambda field_value, fields, field_name: update_callback(field_value, fields, field_name), # noqa: B023 + ) + + return data_parsed + + @staticmethod + def _call_action( + field_value: Any, + fields: dict[str, Any], + field_name: str, + action: Callable, + provider_options: dict | None = None, + **encryption_context, + ) -> None: + """ + Apply a specified action to a field value and update the fields dictionary. + + Params: + -------- + - field_value: Current value of the field being processed. + - fields: Dictionary representing the fields being processed (mutable). + - field_name: Name of the field being processed. + - action: Callable (function or method) to be applied to the field_value. + - provider_options: Optional dictionary representing additional options for the action. + - **encryption_context: Additional keyword arguments collected into a dictionary. 
+ + Returns: + - fields[field_name]: Returns the processed field value + """ + fields[field_name] = action(field_value, provider_options=provider_options, **encryption_context) + return fields[field_name] + + def _normalize_data_to_parse(self, fields: list, data: str | dict) -> dict: + if not fields: + raise ValueError("No fields specified.") + + if isinstance(data, str): + # Parse JSON string as dictionary + data_parsed = self.json_deserializer(data) + elif isinstance(data, dict): + # Convert the data to a JSON string in case it contains non-string keys (e.g., ints) + # Parse the JSON string back into a dictionary + data_parsed = self.json_deserializer(self.json_serializer(data)) + else: + raise DataMaskingUnsupportedTypeError( + f"Unsupported data type. Expected a traversable type (dict or str), but got {type(data)}.", + ) + + return data_parsed diff --git a/aws_lambda_powertools/utilities/data_masking/constants.py b/aws_lambda_powertools/utilities/data_masking/constants.py new file mode 100644 index 00000000000..f35f4291e40 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/constants.py @@ -0,0 +1,14 @@ +# The string that replaces values that have been erased +DATA_MASKING_STRING: str = "*****" +# The maximum number of entries that can be retained in the local cryptographic materials cache +CACHE_CAPACITY: int = 100 +# The maximum time (in seconds) that a cache entry may be kept in the cache +MAX_CACHE_AGE_SECONDS: float = 300.0 +# Maximum number of messages which are allowed to be encrypted under a single cached data key +# Values can be [1 - 4294967296] (2 ** 32) +MAX_MESSAGES_ENCRYPTED: int = 4294967296 +# Maximum number of bytes which are allowed to be encrypted under a single cached data key +# Values can be [1 - 9223372036854775807] (2 ** 63 - 1) +MAX_BYTES_ENCRYPTED: int = 9223372036854775807 + +ENCRYPTED_DATA_KEY_CTX_KEY = "aws-crypto-public-key" diff --git a/aws_lambda_powertools/utilities/data_masking/exceptions.py 
b/aws_lambda_powertools/utilities/data_masking/exceptions.py new file mode 100644 index 00000000000..7c962ddf385 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/exceptions.py @@ -0,0 +1,34 @@ +class DataMaskingUnsupportedTypeError(Exception): + """ + UnsupportedType Error + """ + + +class DataMaskingDecryptKeyError(Exception): + """ + Decrypting with an invalid AWS KMS Key ARN. + """ + + +class DataMaskingEncryptKeyError(Exception): + """ + Encrypting with an invalid AWS KMS Key ARN. + """ + + +class DataMaskingDecryptValueError(Exception): + """ + Decrypting an invalid field. + """ + + +class DataMaskingContextMismatchError(Exception): + """ + Decrypting with the incorrect encryption context. + """ + + +class DataMaskingFieldNotFoundError(Exception): + """ + Field not found. + """ diff --git a/aws_lambda_powertools/utilities/data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py new file mode 100644 index 00000000000..5a0180eb82b --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.utilities.data_masking.provider.base import BaseProvider + +__all__ = [ + "BaseProvider", +] diff --git a/aws_lambda_powertools/utilities/data_masking/provider/base.py b/aws_lambda_powertools/utilities/data_masking/provider/base.py new file mode 100644 index 00000000000..3aacba1b7b2 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/base.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import functools +import json +from typing import Any, Callable, Iterable + +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING + + +class BaseProvider: + """ + The BaseProvider class serves as an abstract base class for data masking providers. 
+ + Examples + -------- + ``` + from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider + from aws_lambda_powertools.utilities.data_masking import DataMasking + + class MyCustomProvider(BaseProvider): + def encrypt(self, data) -> str: + # Implementation logic for data encryption + + def decrypt(self, data) -> Any: + # Implementation logic for data decryption + + def erase(self, data) -> Union[str, Iterable]: + # Implementation logic for data masking + pass + + def lambda_handler(event, context): + provider = MyCustomProvider(["secret-key"]) + data_masker = DataMasking(provider=provider) + + data = { + "project": "powertools", + "sensitive": "password" + } + + encrypted = data_masker.encrypt(data) + + return encrypted + ``` + """ + + def __init__( + self, + json_serializer: Callable[..., str] = functools.partial(json.dumps, ensure_ascii=False), + json_deserializer: Callable[[str], Any] = json.loads, + ) -> None: + self.json_serializer = json_serializer + self.json_deserializer = json_deserializer + + def encrypt(self, data, provider_options: dict | None = None, **encryption_context: str) -> str: + """ + Abstract method for encrypting data. Subclasses must implement this method. + """ + raise NotImplementedError("Subclasses must implement encrypt()") + + def decrypt(self, data, provider_options: dict | None = None, **encryption_context: str) -> Any: + """ + Abstract method for decrypting data. Subclasses must implement this method. + """ + raise NotImplementedError("Subclasses must implement decrypt()") + + def erase(self, data, **kwargs) -> Iterable[str]: + """ + This method irreversibly erases data. + + If the data to be erased is of type `str`, `dict`, or `bytes`, + this method will return an erased string, i.e. "*****". + + If the data to be erased is of an iterable type like `list`, `tuple`, + or `set`, this method will return a new object of the same type as the + input data but with each element replaced by the string "*****". 
+ """ + if isinstance(data, (str, dict, bytes)): + return DATA_MASKING_STRING + elif isinstance(data, (list, tuple, set)): + return type(data)([DATA_MASKING_STRING] * len(data)) + return DATA_MASKING_STRING diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py new file mode 100644 index 00000000000..c1353094144 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider + +__all__ = [ + "AWSEncryptionSDKProvider", +] diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py new file mode 100644 index 00000000000..bbdbb0bad6f --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py @@ -0,0 +1,247 @@ +from __future__ import annotations + +import functools +import json +import logging +from binascii import Error +from typing import Any, Callable, List + +import botocore +from aws_encryption_sdk import ( + CachingCryptoMaterialsManager, + EncryptionSDKClient, + LocalCryptoMaterialsCache, + StrictAwsKmsMasterKeyProvider, +) +from aws_encryption_sdk.exceptions import ( + DecryptKeyError, + GenerateKeyError, + NotSupportedError, +) +from aws_encryption_sdk.structures import MessageHeader + +from aws_lambda_powertools.shared.functions import ( + base64_decode, + bytes_to_base64_string, + bytes_to_string, +) +from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session +from aws_lambda_powertools.utilities.data_masking.constants import ( + CACHE_CAPACITY, + ENCRYPTED_DATA_KEY_CTX_KEY, + MAX_BYTES_ENCRYPTED, + MAX_CACHE_AGE_SECONDS, + MAX_MESSAGES_ENCRYPTED, +) +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + 
DataMaskingContextMismatchError, + DataMaskingDecryptKeyError, + DataMaskingDecryptValueError, + DataMaskingEncryptKeyError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider + +logger = logging.getLogger(__name__) + + +class AWSEncryptionSDKProvider(BaseProvider): + """ + The AWSEncryptionSDKProvider is used as a provider for the DataMasking class. + + Usage + ------- + ``` + from aws_lambda_powertools.utilities.data_masking import DataMasking + from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, + ) + + + def lambda_handler(event, context): + provider = AWSEncryptionSDKProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"]) + data_masker = DataMasking(provider=provider) + + data = { + "project": "powertools", + "sensitive": "password" + } + + encrypted = data_masker.encrypt(data) + + return encrypted + + ``` + """ + + def __init__( + self, + keys: List[str], + key_provider=None, + local_cache_capacity: int = CACHE_CAPACITY, + max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, + max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, + max_bytes_encrypted: int = MAX_BYTES_ENCRYPTED, + json_serializer: Callable[..., str] = functools.partial(json.dumps, ensure_ascii=False), + json_deserializer: Callable[[str], Any] = json.loads, + ): + super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer) + + self._key_provider = key_provider or KMSKeyProvider( + keys=keys, + local_cache_capacity=local_cache_capacity, + max_cache_age_seconds=max_cache_age_seconds, + max_messages_encrypted=max_messages_encrypted, + max_bytes_encrypted=max_bytes_encrypted, + json_serializer=json_serializer, + json_deserializer=json_deserializer, + ) + + def encrypt(self, data: Any, provider_options: dict | None = None, **encryption_context: str) -> str: + return self._key_provider.encrypt(data=data, 
provider_options=provider_options, **encryption_context) + + def decrypt(self, data: str, provider_options: dict | None = None, **encryption_context: str) -> Any: + return self._key_provider.decrypt(data=data, provider_options=provider_options, **encryption_context) + + +class KMSKeyProvider: + + """ + The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS) + client, a caching mechanism, and a keyring for secure key management and data encryption. + """ + + def __init__( + self, + keys: List[str], + json_serializer: Callable[..., str], + json_deserializer: Callable[[str], Any], + local_cache_capacity: int = CACHE_CAPACITY, + max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, + max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, + max_bytes_encrypted: int = MAX_BYTES_ENCRYPTED, + ): + session = botocore.session.Session() + register_feature_to_botocore_session(session, "data-masking") + + self.json_serializer = json_serializer + self.json_deserializer = json_deserializer + self.client = EncryptionSDKClient() + self.keys = keys + self.cache = LocalCryptoMaterialsCache(local_cache_capacity) + self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) + self.cache_cmm = CachingCryptoMaterialsManager( + master_key_provider=self.key_provider, + cache=self.cache, + max_age=max_cache_age_seconds, + max_messages_encrypted=max_messages_encrypted, + max_bytes_encrypted=max_bytes_encrypted, + ) + + def encrypt(self, data: Any, provider_options: dict | None = None, **encryption_context: str) -> str: + """ + Encrypt data using the AWSEncryptionSDKProvider. + + Parameters + ------- + data : Union[bytes, str] + The data to be encrypted. + provider_options : dict + Additional options for the aws_encryption_sdk.EncryptionSDKClient + **encryption_context : str + Additional keyword arguments collected into a dictionary. + + Returns + ------- + ciphertext : str + The encrypted data, as a base64-encoded string. 
+ """ + provider_options = provider_options or {} + self._validate_encryption_context(encryption_context) + + data_encoded = self.json_serializer(data).encode("utf-8") + + try: + ciphertext, _ = self.client.encrypt( + source=data_encoded, + materials_manager=self.cache_cmm, + encryption_context=encryption_context, + **provider_options, + ) + except GenerateKeyError: + raise DataMaskingEncryptKeyError( + "Failed to encrypt data. Please ensure you are using a valid Symmetric AWS KMS Key ARN, not KMS Key ID or alias.", # noqa E501 + ) + + return bytes_to_base64_string(ciphertext) + + def decrypt(self, data: str, provider_options: dict | None = None, **encryption_context: str) -> Any: + """ + Decrypt data using AWSEncryptionSDKProvider. + + Parameters + ------- + data : Union[bytes, str] + The encrypted data, as a base64-encoded string + provider_options + Additional options for the aws_encryption_sdk.EncryptionSDKClient + + Returns + ------- + ciphertext : bytes + The decrypted data in bytes + """ + provider_options = provider_options or {} + self._validate_encryption_context(encryption_context) + + try: + ciphertext_decoded = base64_decode(data) + except Error: + raise DataMaskingDecryptValueError( + "Data decryption failed. Please ensure that you are attempting to decrypt data that was previously encrypted.", # noqa E501 + ) + + try: + decryptor_header: MessageHeader + + ciphertext, decryptor_header = self.client.decrypt( + source=ciphertext_decoded, + key_provider=self.key_provider, + **provider_options, + ) + except DecryptKeyError: + raise DataMaskingDecryptKeyError( + "Failed to decrypt data - Please ensure you are using a valid Symmetric AWS KMS Key ARN, not KMS Key ID or alias.", # noqa E501 + ) + except (TypeError, NotSupportedError): + raise DataMaskingDecryptValueError( + "Data decryption failed. 
Please ensure that you are attempting to decrypt data that was previously encrypted.", # noqa E501 + ) + + self._compare_encryption_context(decryptor_header.encryption_context, encryption_context) + + decoded_ciphertext = bytes_to_string(ciphertext) + + return self.json_deserializer(decoded_ciphertext) + + @staticmethod + def _validate_encryption_context(context: dict): + if not context: + return + + for key, value in context.items(): + if not isinstance(value, str): + raise DataMaskingUnsupportedTypeError( + f"Encryption context values must be string. Received: {key}={value}", + ) + + @staticmethod + def _compare_encryption_context(actual_context: dict, expected_context: dict): + # We can safely remove encrypted data key after decryption for exact match verification + actual_context.pop(ENCRYPTED_DATA_KEY_CTX_KEY, None) + + # Encryption context could be out of order hence a set + if set(actual_context.items()) != set(expected_context.items()): + raise DataMaskingContextMismatchError( + "Encryption context does not match. 
You must use the exact same context used during encryption", + ) diff --git a/docs/index.md b/docs/index.md index 7f1ca98fb74..b13bbc122d8 100644 --- a/docs/index.md +++ b/docs/index.md @@ -701,6 +701,7 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai | [**Event source data classes**](./utilities/data_classes.md){target="_blank"} | Data classes describing the schema of common Lambda event triggers | | [**Parser**](./utilities/parser.md){target="_blank"} | Data parsing and deep validation using Pydantic | | [**Idempotency**](./utilities/idempotency.md){target="_blank"} | Idempotent Lambda handler | +| [**Data Masking**](./utilities/data_masking.md){target="_blank"} | Protect confidential data with easy removal or encryption | | [**Feature Flags**](./utilities/feature_flags.md){target="_blank"} | A simple rule engine to evaluate when one or multiple features should be enabled depending on the input | | [**Streaming**](./utilities/streaming.md){target="_blank"} | Streams datasets larger than the available memory as streaming data. | diff --git a/docs/utilities/data_masking.md b/docs/utilities/data_masking.md new file mode 100644 index 00000000000..5c30edc6bff --- /dev/null +++ b/docs/utilities/data_masking.md @@ -0,0 +1,638 @@ +--- +title: Data Masking +description: Utility +--- + + + +The data masking utility can encrypt, decrypt, or irreversibly erase sensitive information to protect data confidentiality. 
+ +```mermaid +stateDiagram-v2 + direction LR + LambdaFn: Your Lambda function + DataMasking: DataMasking + Operation: Possible operations + Input: Sensitive value + Erase: Erase + Encrypt: Encrypt + Decrypt: Decrypt + Provider: AWS Encryption SDK provider + Result: Data transformed (erased, encrypted, or decrypted) + + LambdaFn --> DataMasking + DataMasking --> Operation + + state Operation { + [*] --> Input + Input --> Erase: Irreversible + Input --> Encrypt + Input --> Decrypt + Encrypt --> Provider + Decrypt --> Provider + } + + Operation --> Result +``` + +## Key features + +* Encrypt, decrypt, or irreversibly erase data with ease +* Erase sensitive information in one or more fields within nested data +* Seamless integration with [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/introduction.html){target="_blank"} for industry and AWS security best practices + +## Terminology + +**Erasing** replaces sensitive information **irreversibly** with a non-sensitive placeholder _(`*****`)_. This operation replaces data in-memory, making it a one-way action. + +**Encrypting** transforms plaintext into ciphertext using an encryption algorithm and a cryptographic key. It allows you to encrypt any sensitive data, so only allowed personnel to decrypt it. Learn more about encryption [here](https://aws.amazon.com/blogs/security/importance-of-encryption-and-how-aws-can-help/){target="_blank"}. + +**Decrypting** transforms ciphertext back into plaintext using a decryption algorithm and the correct decryption key. + +**Encryption context** is a non-secret `key=value` data used for authentication like `tenant_id:`. This adds extra security and confirms encrypted data relationship with a context. + +**[Encrypted message](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/message-format.html){target="_blank"}** is a portable data structure that includes encrypted data along with copies of the encrypted data key. 
It includes everything Encryption SDK needs to validate authenticity, integrity, and to decrypt with the right master key. + + +**[Envelope encryption](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/concepts.html#envelope-encryption){target="_blank"}** uses two different keys to encrypt data safely: master and data key. The data key encrypts the plaintext, and the master key encrypts the data key. It simplifies key management _(you own the master key)_, isolates compromises to data key, and scales better with large data volumes. + + +
+```mermaid +graph LR + M(Master key) --> |Encrypts| D(Data key) + D(Data key) --> |Encrypts| S(Sensitive data) +``` +Envelope encryption visualized. +
+ +## Getting started + +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}. + +### Install + +!!! note "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" + +Add `aws-lambda-powertools[datamasking]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will install the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/introduction.html){target="_blank"}. + + +AWS Encryption SDK contains non-Python dependencies. This means you should use [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/using-sam-cli-build.html#using-sam-cli-build-options-container){target="_blank"} or [official build container images](https://gallery.ecr.aws/search?searchTerm=sam%2Fbuild-python&popularRegistries=amazon){target="_blank"} when building your application for AWS Lambda. Local development should work as expected. + + +### Required resources + +!!! info "By default, we use Amazon Key Management Service (KMS) for encryption and decryption operations." + +Before you start, you will need a KMS symmetric key to encrypt and decrypt your data. Your Lambda function will need read and write access to it. + +**NOTE**. We recommend setting a minimum of 1024MB of memory _(CPU intensive)_, and separate Lambda functions for encrypt and decrypt. For more information, you can see the full reports of our [load tests](https://github.com/aws-powertools/powertools-lambda-python/pull/2197#issuecomment-1730571597){target="_blank"} and [traces](https://github.com/aws-powertools/powertools-lambda-python/pull/2197#issuecomment-1732060923){target="_blank"}. 
+ +=== "AWS Serverless Application Model (SAM) example" + ```yaml hl_lines="15 29 41 61 66-67" + --8<-- "examples/data_masking/sam/template.yaml" + ``` + + 1. [Key policy examples using IAM Roles](https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-default.html#key-policy-default-allow-administrators){target="_blank"} + 2. [SAM generated CloudFormation Resources](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-generated-resources-function.html#sam-specification-generated-resources-function-not-role){target="_blank"} + 3. Required only when using [multiple keys](#using-multiple-keys) + +### Erasing data + +Erasing will remove the original data and replace it with a `*****`. This means you cannot recover erased data, and the data type will change to `str` for all data unless the data to be erased is of an Iterable type (`list`, `tuple`, `set`), in which case the method will return a new object of the same type as the input data but with each element replaced by the string `*****`. + +=== "getting_started_erase_data.py" + ```python hl_lines="4 8 17" + --8<-- "examples/data_masking/src/getting_started_erase_data.py" + ``` + + 1. See [working with nested data](#working-with-nested-data) to learn more about the `fields` parameter.

If we omit `fields` parameter, the entire dictionary will be erased with `*****`. + +=== "generic_data_input.json" + ```json hl_lines="7 9 14" + --8<-- "examples/data_masking/src/generic_data_input.json" + ``` + +=== "getting_started_erase_data_output.json" + ```json hl_lines="5 7 12" + --8<-- "examples/data_masking/src/getting_started_erase_data_output.json" + ``` + +### Encrypting data + +???+ note "About static typing and encryption" + Encrypting data may lead to a different data type, as it always transforms into a string _(``)_. + +To encrypt, you will need an [encryption provider](#providers). Here, we will use `AWSEncryptionSDKProvider`. + +Under the hood, we delegate a [number of operations](#encrypt-operation-with-encryption-sdk-kms) to AWS Encryption SDK to authenticate, create a portable encryption message, and actual data encryption. + +=== "getting_started_encrypt_data.py" + ```python hl_lines="6-8 14-15 26" + --8<-- "examples/data_masking/src/getting_started_encrypt_data.py" + ``` + + 1. You can use more than one KMS Key for higher availability but increased latency.

Encryption SDK will ensure the data key is encrypted with both keys. + +=== "generic_data_input.json" + ```json + --8<-- "examples/data_masking/src/generic_data_input.json" + ``` + +=== "encrypt_data_output.json" + ```json + --8<-- "examples/data_masking/src/encrypt_data_output.json" + ``` + +### Decrypting data + +???+ note "About static typing and decryption" + Decrypting data may lead to a different data type, as encrypted data is always a string _(``)_. + +To decrypt, you will need an [encryption provider](#providers). Here, we will use `AWSEncryptionSDKProvider`. + +Under the hood, we delegate a [number of operations](#decrypt-operation-with-encryption-sdk-kms) to AWS Encryption SDK to verify authentication, integrity, and actual ciphertext decryption. + +=== "getting_started_decrypt_data.py" + + **NOTE**. Decryption only works with KMS Key ARN. + + ```python hl_lines="6-7 12-13 24" + --8<-- "examples/data_masking/src/getting_started_decrypt_data.py" + ``` + + 1. Note that KMS key alias or key ID won't work. + 2. You can use more than one KMS Key for higher availability but increased latency.

Encryption SDK will call `Decrypt` API with all master keys when trying to decrypt the data key. + +=== "getting_started_decrypt_data_input.json" + + ```json + --8<-- "examples/data_masking/src/getting_started_decrypt_data_input.json" + ``` + +=== "getting_started_decrypt_data_output.json" + + ```json + --8<-- "examples/data_masking/src/getting_started_decrypt_data_output.json" + ``` + +### Encryption context for integrity and authenticity + + +For a stronger security posture, you can add metadata to each encryption operation, and verify them during decryption. This is known as additional authenticated data (AAD). These are non-sensitive data that can help protect authenticity and integrity of your encrypted data, and even help to prevent a [confused deputy](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html){target="_blank"} situation. + + +???+ danger "Important considerations you should know" + 1. **Exact match verification on decrypt**. Be careful using random data like `timestamps` as encryption context if you can't provide them on decrypt. + 2. **Only `string` values are supported**. We will raise `DataMaskingUnsupportedTypeError` for non-string values. + 3. **Use non-sensitive data only**. When using KMS, encryption context is available as plaintext in AWS CloudTrail, unless you [intentionally disabled KMS events](https://docs.aws.amazon.com/kms/latest/developerguide/logging-using-cloudtrail.html#filtering-kms-events){target="_blank"}. + +=== "getting_started_encryption_context.py" + + ```python hl_lines="26-28" + --8<-- "examples/data_masking/src/getting_started_encryption_context.py" + ``` + + 1. They must match on `decrypt()` otherwise the operation will fail with `DataMaskingContextMismatchError`. + +=== "getting_started_decryption_context.py" + + ```python hl_lines="26-28" + --8<-- "examples/data_masking/src/getting_started_decryption_context.py" + ``` + + 1. 
They must match otherwise the operation will fail with `DataMaskingContextMismatchError`. + +### Choosing parts of your data + +???+ note "Current limitations" + 1. The `fields` parameter is not yet supported in `encrypt` and `decrypt` operations. + 2. We support `JSON` data types only - see [data serialization for more details](#data-serialization). + +You can use the `fields` parameter with the dot notation `.` to choose one or more parts of your data to `erase`. This is useful when you want to keep data structure intact except the confidential fields. + +When `fields` is present, `erase` behaves differently: + +| Operation | Behavior | Example | Result | +| --------- | ----------------------------------------------------------- | ----------------------- | ------------------------------- | +| `erase` | Replace data while keeping collections type intact. | `{"cards": ["a", "b"]}` | `{"cards": ["*****", "*****"]}` | + +Here are common scenarios to best visualize how to use `fields`. + +=== "Top keys only" + + You want to erase data in the `card_number` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["card_number"])` + + ```json hl_lines="4" + --8<-- "examples/data_masking/src/choosing_payload_top_keys.json" + ``` + + === "Result" + + ```json hl_lines="4" + --8<-- "examples/data_masking/src/choosing_payload_top_keys_output.json" + ``` + +=== "Nested key" + + You want to erase data in the `postcode` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address.postcode"])` + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_nested_key.json" + ``` + + === "Result" + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_nested_key_output.json" + ``` + +=== "Multiple keys" + + You want to erase data in both `postcode` and `street` fields. 
+ + === "Data" + + > Expression: `data_masker.erase(data, fields=["address.postcode", "address.street"])` + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_multiple_keys.json" + ``` + + === "Result" + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_multiple_keys_output.json" + ``` + +=== "All key items" + + You want to erase data under `address` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address"])` + + ```json hl_lines="6-17" + --8<-- "examples/data_masking/src/choosing_payload_all_nested_keys.json" + ``` + + === "Result" + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_all_nested_keys_output.json" + ``` + +=== "Complex nested key" + + You want to erase data under `name` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["category..name"])` + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_complex_nested_keys.json" + ``` + + === "Result" + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_complex_nested_keys_output.json" + ``` + +=== "All fields in a list" + + You want to erase data under `street` field located at the any index of the address list. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address[*].street"])` + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_list_all_index.json" + ``` + + === "Result" + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_list_all_index_output.json" + ``` + +=== "Slicing a list" + + You want to erase data by slicing a list. 
+ + === "Data" + + > Expression: `data_masker.erase(data, fields=["address[-1].street"])` + + ```json hl_lines="16" + --8<-- "examples/data_masking/src/choosing_payload_list_slice.json" + ``` + + === "Result" + + ```json hl_lines="16" + --8<-- "examples/data_masking/src/choosing_payload_list_slice_output.json" + ``` + +=== "Complex expressions" + + You want to erase data by finding for a field with conditional expression. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["$.address[?(@.postcode > 12000)]"])` + + > `$`: Represents the root of the JSON structure. + + > `.address`: Selects the "address" property within the JSON structure. + + > `(@.postcode > 12000)`: Specifies the condition that elements should meet. It selects elements where the value of the `postcode` property is `greater than 12000`. + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_complex_search.json" + ``` + + === "Result" + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_complex_search_output.json" + ``` + +For comprehensive guidance on using JSONPath syntax, please refer to the official documentation available at [jsonpath-ng](https://github.com/h2non/jsonpath-ng#jsonpath-syntax){target="_blank" rel="nofollow"} + +#### JSON + +We also support data in JSON string format as input. We automatically deserialize it, then handle each field operation as expected. + +Note that the return will be a deserialized JSON and your desired fields updated. + +=== "Data" + + Expression: `data_masker.erase(data, fields=["card_number", "address.postcode"])` + + ```json + --8<-- "examples/data_masking/src/choosing_payload_simple_json.json" + ``` + +=== "Result" + + ```json + --8<-- "examples/data_masking/src/choosing_payload_simple_json_output.json" + ``` + +## Advanced + +### Data serialization + +???+ note "Current limitations" + 1. Python classes, `Dataclasses`, and `Pydantic models` are not supported yet. 
+ +Before we traverse the data structure, we perform two important operations on input data: + +1. If `JSON string`, **deserialize** using default or provided deserializer. +2. If `dictionary`, **normalize** into `JSON` to prevent traversing unsupported data types. + +When decrypting, we revert the operation to restore the original data structure. + +For compatibility or performance, you can optionally pass your own JSON serializer and deserializer to replace `json.dumps` and `json.loads` respectively: + +```python hl_lines="17-18" title="advanced_custom_serializer.py" +--8<-- "examples/data_masking/src/advanced_custom_serializer.py" +``` + +### Using multiple keys + +You can use multiple KMS keys from more than one AWS account for higher availability, when instantiating `AWSEncryptionSDKProvider`. + +```python hl_lines="15" title="using_multiple_keys.py" +--8<-- "examples/data_masking/src/using_multiple_keys.py" +``` + +### Providers + +#### AWS Encryption SDK + +You can modify the following values when initializing the `AWSEncryptionSDKProvider` to best accommodate your security and performance thresholds. + +| Parameter | Default | Description | +| -------------------------- | --------------------- | --------------------------------------------------------------------------------------------- | +| **local_cache_capacity** | `100` | The maximum number of entries that can be retained in the local cryptographic materials cache | +| **max_cache_age_seconds** | `300` | The maximum time (in seconds) that a cache entry may be kept in the cache | +| **max_messages_encrypted** | `4294967296` | The maximum number of messages that may be encrypted under a cache entry | +| **max_bytes_encrypted** | `9223372036854775807` | The maximum number of bytes that may be encrypted under a cache entry | + +If required, you can customize the default values when initializing the `AWSEncryptionSDKProvider` class. 
+ +```python hl_lines="14-19" title="aws_encryption_provider_example.py" +--8<-- "examples/data_masking/src/aws_encryption_provider_example.py" +``` + +##### Passing additional SDK arguments + +!!! note "See the [AWS Encryption SDK docs for more details](https://aws-encryption-sdk-python.readthedocs.io/en/latest/generated/aws_encryption_sdk.html#aws_encryption_sdk.EncryptionSDKClient.encrypt){target="_blank"}" + +As an escape hatch mechanism, you can pass additional arguments to the `AWSEncryptionSDKProvider` via the `provider_options` parameter. + +For example, the AWS Encryption SDK defaults to using the `AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384` algorithm for encrypting your Data Key. If you want, you have the flexibility to customize and choose a different encryption algorithm. + +```python hl_lines="5 26 30" title="changing_default_algorithm.py" +--8<-- "examples/data_masking/src/changing_default_algorithm.py" +``` + +### Data masking request flow + +The following sequence diagrams explain how `DataMasking` behaves under different scenarios. + +#### Erase operation + +Erasing operations occur in-memory and we cannot recover the original value. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking (in memory) + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: erase(data) + DataMasking->>DataMasking: replaces data with ***** + Note over Lambda,DataMasking: No encryption providers involved. + DataMasking->>Lambda: data masked + Lambda-->>Client: Return response +``` +Simple masking operation +
+
+#### Encrypt operation with Encryption SDK (KMS)
+
+We call KMS to generate a unique data key that can be reused across multiple `encrypt` operations in-memory. This improves performance, reduces cost, and prevents throttling.
+
+To make this operation simpler to visualize, we keep caching details in a [separate sequence diagram](#caching-encrypt-operations-with-encryption-sdk). Caching is enabled by default.
+
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, data key encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Encrypting operation using envelope encryption. +
+ +#### Encrypt operation with multiple KMS Keys + +When encrypting data with multiple KMS keys, the `aws_encryption_sdk` makes additional API calls to encrypt the data with each of the specified keys. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KEY_1, KEY_2]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API - KEY_1 + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: KEY_1.encrypt(DATA_KEY) + loop For every additional KMS Key + DataMasking->>EncryptionProvider: Encrypt DATA_KEY + Note over DataMasking,EncryptionProvider: KMS Encrypt API - KEY_2 + end + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, all data keys encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Encrypting operation using envelope encryption. +
+
+#### Decrypt operation with Encryption SDK (KMS)
+
+We call KMS to decrypt the encrypted data key available in the encrypted message. If successful, we run authentication _(context)_ and integrity checks (_algorithm, data key length, etc_) before proceeding.
+
+Lastly, we decrypt the original encrypted data, throw away the decrypted data key for security reasons, and return the original plaintext data.
+
+```mermaid
+sequenceDiagram
+    autonumber
+    participant Client
+    participant Lambda
+    participant DataMasking as Data Masking
+    participant EncryptionProvider as Encryption Provider
+    Client->>Lambda: Invoke (event)
+    Lambda->>DataMasking: Init Encryption Provider with master key
+    Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY])
+    Lambda->>DataMasking: decrypt(data)
+    DataMasking->>EncryptionProvider: Decrypt encrypted data key
+    Note over DataMasking,EncryptionProvider: KMS Decrypt API
+    DataMasking->>DataMasking: Authentication and integrity checks
+    DataMasking->>DataMasking: DATA_KEY.decrypt(data)
+    DataMasking->>DataMasking: Discards decrypted data key
+    DataMasking->>Lambda: Plaintext
+    Lambda-->>Client: Return response
+```
+Decrypting operation using envelope encryption.
+
+ +#### Caching encrypt operations with Encryption SDK + +Without caching, every `encrypt()` operation would generate a new data key. It significantly increases latency and cost for ephemeral and short running environments like Lambda. + +With caching, we balance ephemeral Lambda environment performance characteristics with [adjustable thresholds](#aws-encryption-sdk) to meet your security needs. + +!!! info "Data key recycling" + We request a new data key when a cached data key exceeds any of the following security thresholds: + + 1. **Max age in seconds** + 2. **Max number of encrypted messages** + 3. **Max bytes encrypted** across all operations + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, data key encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda->>DataMasking: encrypt(another_data) + DataMasking->>DataMasking: Searches for data key in cache + alt Is Data key in cache? + DataMasking->>DataMasking: Reuses data key + else Is Data key evicted from cache? + DataMasking->>EncryptionProvider: Create unique data key + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + end + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: Create encrypted message + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Caching data keys during encrypt operation. +
+ +## Testing your code + +### Testing erase operation + +Testing your code with a simple erase operation + +=== "test_lambda_mask.py" + +```python hl_lines="22" +--8<-- "examples/data_masking/tests/test_lambda_mask.py" +``` + +=== "lambda_mask.py" + +```python hl_lines="3 12" +--8<-- "examples/data_masking/tests/lambda_mask.py" +``` diff --git a/examples/data_masking/sam/template.yaml b/examples/data_masking/sam/template.yaml new file mode 100644 index 00000000000..67d5d923515 --- /dev/null +++ b/examples/data_masking/sam/template.yaml @@ -0,0 +1,67 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: > + Powertools for AWS Lambda (Python) data masking example + +Globals: # https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html + Function: + Timeout: 5 + Runtime: python3.11 + Tracing: Active + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: INFO + KMS_KEY_ARN: !GetAtt DataMaskingMasterKey.Arn + +# In production, we recommend you split up the encrypt and decrypt for fine-grained security. +# For example, one function can act as the encryption proxy via HTTP requests, data pipeline, etc., +# while only authorized personnel can call decrypt via a separate function. +Resources: + DataMaskingEncryptFunctionExample: + Type: AWS::Serverless::Function + Properties: + Handler: data_masking_function_example.lambda_handler + CodeUri: ../src + Description: Data Masking encryption function + # Cryptographic operations demand more CPU. CPU is proportionally allocated based on memory size. + # We recommend allocating a minimum of 1024MB of memory. 
+ MemorySize: 1024 + + # DataMaskingDecryptFunctionExample: + # Type: AWS::Serverless::Function + # Properties: + # Handler: data_masking_function_decrypt.lambda_handler + # CodeUri: ../src + # Description: Data Masking decryption function + # MemorySize: 1024 + + # KMS Key + DataMaskingMasterKey: + Type: "AWS::KMS::Key" + Properties: + Description: KMS Key for encryption and decryption using Powertools for AWS Lambda Data masking feature + # KMS Key support both IAM Resource Policies and Key Policies + # For more details: https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html + KeyPolicy: + Version: "2012-10-17" + Id: data-masking-enc-dec + Statement: + # For security reasons, ensure your KMS Key has at least one administrator. + # In this example, the root account is granted administrator permissions. + # However, we recommended configuring specific IAM Roles for enhanced security in production. + - Effect: Allow + Principal: + AWS: !Sub "arn:aws:iam::${AWS::AccountId}:root" # (1)! + Action: "kms:*" + Resource: "*" + # We must grant Lambda's IAM Role access to the KMS Key + - Effect: Allow + Principal: + AWS: !GetAtt DataMaskingEncryptFunctionExampleRole.Arn # (2)! + Action: + - kms:Decrypt # to decrypt encrypted data key + - kms:GenerateDataKey # to create an unique and random data key for encryption + # Encrypt permission is required only when using multiple keys + - kms:Encrypt # (3)! 
+ Resource: "*" diff --git a/examples/data_masking/src/advanced_custom_serializer.py b/examples/data_masking/src/advanced_custom_serializer.py new file mode 100644 index 00000000000..f870624bccb --- /dev/null +++ b/examples/data_masking/src/advanced_custom_serializer.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +import ujson + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider( + keys=[KMS_KEY_ARN], + json_serializer=ujson.dumps, + json_deserializer=ujson.loads, +) +data_masker = DataMasking(provider=encryption_provider) + + +def lambda_handler(event: dict, context: LambdaContext) -> str: + data: dict = event.get("body", {}) + + return data_masker.encrypt(data) diff --git a/examples/data_masking/src/aws_encryption_provider_example.py b/examples/data_masking/src/aws_encryption_provider_example.py new file mode 100644 index 00000000000..2ef34a82934 --- /dev/null +++ b/examples/data_masking/src/aws_encryption_provider_example.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider( + keys=[KMS_KEY_ARN], + local_cache_capacity=200, + max_cache_age_seconds=400, + max_messages_encrypted=200, + max_bytes_encrypted=2000) + +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def 
lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/src/changing_default_algorithm.py b/examples/data_masking/src/changing_default_algorithm.py new file mode 100644 index 00000000000..27d52905459 --- /dev/null +++ b/examples/data_masking/src/changing_default_algorithm.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +import os + +from aws_encryption_sdk.identifiers import Algorithm + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> str: + data: dict = event.get("body", {}) + + logger.info("Encrypting whole object with a different algorithm") + + provider_options = {"algorithm": Algorithm.AES_256_GCM_HKDF_SHA512_COMMIT_KEY} + + encrypted = data_masker.encrypt( + data, + provider_options=provider_options, + ) + + return encrypted diff --git a/examples/data_masking/src/choosing_payload_all_nested_keys.json b/examples/data_masking/src/choosing_payload_all_nested_keys.json new file mode 100644 index 00000000000..7fad154c03e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_all_nested_keys.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street", + "country": "United States", + "timezone": 
"America/La_Paz" + }, + { + "postcode": 67890, + "street": "100 Main Street", + "country": "United States", + "timezone": "America/Mazatlan" + } + ] +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_all_nested_keys_output.json b/examples/data_masking/src/choosing_payload_all_nested_keys_output.json new file mode 100644 index 00000000000..a28bfee974e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_all_nested_keys_output.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + "*****", + "*****" + ] +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_complex_nested_keys.json b/examples/data_masking/src/choosing_payload_complex_nested_keys.json new file mode 100644 index 00000000000..7096e0074d9 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_nested_keys.json @@ -0,0 +1,11 @@ +{ + "category": { + "subcategory": { + "brand" : { + "product": { + "name": "Car" + } + } + } + } +} diff --git a/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json b/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json new file mode 100644 index 00000000000..843c8c7e1ce --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json @@ -0,0 +1,11 @@ +{ + "category": { + "subcategory": { + "brand" : { + "product": { + "name": "*****" + } + } + } + } +} diff --git a/examples/data_masking/src/choosing_payload_complex_search.json b/examples/data_masking/src/choosing_payload_complex_search.json new file mode 100644 index 00000000000..e8db38a79ad --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_search.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Drive" + }, + { + "postcode": 67890, + 
"street": "111 Main Street" + }, + { + "postcode": 11111, + "street": "100 Any Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_complex_search_output.json b/examples/data_masking/src/choosing_payload_complex_search_output.json new file mode 100644 index 00000000000..6198e27c09a --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_search_output.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "*****" + }, + { + "postcode": 67890, + "street": "*****" + }, + { + "postcode": 11111, + "street": "100 Any Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_all_index.json b/examples/data_masking/src/choosing_payload_list_all_index.json new file mode 100644 index 00000000000..670e3c420be --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_all_index.json @@ -0,0 +1,15 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Drive" + }, + { + "postcode": 67890, + "street": "100 Main Street," + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_all_index_output.json b/examples/data_masking/src/choosing_payload_list_all_index_output.json new file mode 100644 index 00000000000..8fb1f1b1c6d --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_all_index_output.json @@ -0,0 +1,16 @@ + +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "*****" + }, + { + "postcode": 67890, + "street": "*****" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_index.json b/examples/data_masking/src/choosing_payload_list_index.json new file mode 100644 index 00000000000..0f543b42f5f --- /dev/null +++ 
b/examples/data_masking/src/choosing_payload_list_index.json @@ -0,0 +1,15 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_index_output.json b/examples/data_masking/src/choosing_payload_list_index_output.json new file mode 100644 index 00000000000..1481d78f4b6 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_index_output.json @@ -0,0 +1,16 @@ + +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "*****" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_slice.json b/examples/data_masking/src/choosing_payload_list_slice.json new file mode 100644 index 00000000000..c8a9f7f58af --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_slice.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + }, + { + "postcode": 78495, + "street": "111 Any Drive" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_slice_output.json b/examples/data_masking/src/choosing_payload_list_slice_output.json new file mode 100644 index 00000000000..efab8b03400 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_slice_output.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + }, + { + "postcode": 11111, + "street": "*****" + } + ] 
+} diff --git a/examples/data_masking/src/choosing_payload_multiple_keys.json b/examples/data_masking/src/choosing_payload_multiple_keys.json new file mode 100644 index 00000000000..640c274868e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_multiple_keys.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": 12345, + "street": "123 Any Street" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_multiple_keys_output.json b/examples/data_masking/src/choosing_payload_multiple_keys_output.json new file mode 100644 index 00000000000..fca3391f2f4 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_multiple_keys_output.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": "*****", + "street": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_nested_key.json b/examples/data_masking/src/choosing_payload_nested_key.json new file mode 100644 index 00000000000..e3ff995026f --- /dev/null +++ b/examples/data_masking/src/choosing_payload_nested_key.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": 12345 + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_nested_key_output.json b/examples/data_masking/src/choosing_payload_nested_key_output.json new file mode 100644 index 00000000000..463f5a943f3 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_nested_key_output.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_simple_json.json 
b/examples/data_masking/src/choosing_payload_simple_json.json new file mode 100644 index 00000000000..057d43087f0 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_simple_json.json @@ -0,0 +1 @@ +'{"name": "Carlos", "operation": "non sensitive", "card_number": "1111 2222 3333 4444", "address": {"postcode": 12345}}' \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_simple_json_output.json b/examples/data_masking/src/choosing_payload_simple_json_output.json new file mode 100644 index 00000000000..b8920dc9696 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_simple_json_output.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "*****", + "address": { + "postcode": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_top_keys.json b/examples/data_masking/src/choosing_payload_top_keys.json new file mode 100644 index 00000000000..dce6ed78780 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_top_keys.json @@ -0,0 +1,5 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444" +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_top_keys_output.json b/examples/data_masking/src/choosing_payload_top_keys_output.json new file mode 100644 index 00000000000..c7d877cb804 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_top_keys_output.json @@ -0,0 +1,5 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "*****" +} \ No newline at end of file diff --git a/examples/data_masking/src/data_masking_function_example.py b/examples/data_masking/src/data_masking_function_example.py new file mode 100644 index 00000000000..e7ed3326890 --- /dev/null +++ b/examples/data_masking/src/data_masking_function_example.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger, 
Tracer +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +tracer = Tracer() +logger = Logger() + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + logger.info("Hello world function - HTTP 200") + + data = event["body"] + + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) + encrypted = data_masker.encrypt(data) + decrypted = data_masker.decrypt(encrypted) + return {"Decrypted_json": decrypted} diff --git a/examples/data_masking/src/data_masking_function_example_output.json b/examples/data_masking/src/data_masking_function_example_output.json new file mode 100644 index 00000000000..87601e79ee4 --- /dev/null +++ b/examples/data_masking/src/data_masking_function_example_output.json @@ -0,0 +1,34 @@ +{ + "Decrypted_json": { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "phone_numbers": [ + "+1-555-555-1234", + "+1-555-555-5678" + ], + "interests": [ + "Hiking", + "Traveling", + "Photography", + "Reading" + ], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr." + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31" + }, + "about_me": "\n Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis\n sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,\n ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.\n Praesent fringilla sem eu dui convallis luctus. 
Donec ullamcorper, sapien ut convallis congue,\n risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin\n interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat\n volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.\n Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus\n malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc.\n " + } + } \ No newline at end of file diff --git a/examples/data_masking/src/encrypt_data_output.json b/examples/data_masking/src/encrypt_data_output.json new file mode 100644 index 00000000000..06e32c83804 --- /dev/null +++ b/examples/data_masking/src/encrypt_data_output.json @@ -0,0 +1,3 @@ +{ + "body": "AgV4uF5K2YMtNhYrtviTwKNrUHhqQr73l/jNfukkh+qLOC8AXwABABVhd3MtY3J5cHRvLXB1YmxpYy1rZXkAREEvcjEyaFZHY1R5cjJuTDNKbTJ3UFA3R3ZjaytIdi9hekZqbXVUb25Ya3J5SzFBOUlJZDZxZXpSR1NTVnZDUUxoZz09AAEAB2F3cy1rbXMAS2Fybjphd3M6a21zOnVzLWVhc3QtMToyMDA5ODQxMTIzODY6a2V5LzZkODJiMzRlLTM2NjAtNDRlMi04YWJiLTdmMzA1OGJlYTIxMgC4AQIBAHjxYXAO7wQGd+7qxoyvXAajwqboF5FL/9lgYUNJTB8VtAHBP2hwVgw+zypp7GoMNTPAAAAAfjB8BgkqhkiG9w0BBwagbzBtAgEAMGgGCSqGSIb3DQEHATAeBglghkgBZQMEAS4wEQQMx/B25MTgWwpL7CmuAgEQgDtan3orAOKFUfyNm3v6rFcglb+BVVVDV71fj4aRljhpg1ixsYFaKsoej8NcwRktIiWE+mw9XmTEVb6xFQIAABAA9DeLzlRaRQgTcXMJG0iBu/YTyyDKiROD+bU1Y09X9RBz5LA1nWIENJKq2seAhNSB/////wAAAAEAAAAAAAAAAAAAAAEAAAEBExLJ9wI4n7t+wyPEEP4kjYFBdkmNuLLsVC2Yt8mv9Y1iH2G+/g9SaIcdK57pkoW0ECpBxZVOxCuhmK2s74AJCUdem9McjS1waUKyzYTi9vv2ySNBsABIDwT990rE7jZJ3tEZAqcWZg/eWlxvnksFR/akBWZKsKzFz6lF57+cTgdISCEJRV0E7fcUeCuaMaQGK1Qw2OCmIeHEG5j5iztBkZG2IB2CVND/AbxmDUFHwgjsrJPTzaDYSufcGMoZW1A9X1sLVfqNVKvnOFP5tNY7kPF5eAI9FhGBw8SjTqODXz4k6zuqzy9no8HtXowP265U8NZ5VbVTd/zuVEbZyK5KBqzP1sExW4RhnlpXMoOs9WSuAGcwZQIxANTeEwb9V7CacV2Urt/oCqysUzhoV2AcT2ZjryFqY79Tsg+FRpIx7cBizL4ieRzbhQIwcRasNncO5OZOcmVr0MqHv+gCVznndMgjXJmWwUa7h6skJKmhhMPlN0CsugxtVW
nD" +} diff --git a/examples/data_masking/src/generic_data_input.json b/examples/data_masking/src/generic_data_input.json new file mode 100644 index 00000000000..60ab0aa278e --- /dev/null +++ b/examples/data_masking/src/generic_data_input.json @@ -0,0 +1,21 @@ +{ + "body": + { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": { + "street": "456 ACME Ave", + "city": "Anytown", + "state": "CA", + "zip": "12345" + } + } +} \ No newline at end of file diff --git a/examples/data_masking/src/getting_started_decrypt_data.py b/examples/data_masking/src/getting_started_decrypt_data.py new file mode 100644 index 00000000000..d8e746a8dfe --- /dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") # (1)! + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) # (2)! 
+data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Decrypting whole object") + + decrypted = data_masker.decrypt(data) + + return decrypted diff --git a/examples/data_masking/src/getting_started_decrypt_data_input.json b/examples/data_masking/src/getting_started_decrypt_data_input.json new file mode 100644 index 00000000000..06e32c83804 --- /dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data_input.json @@ -0,0 +1,3 @@ +{ + "body": "AgV4uF5K2YMtNhYrtviTwKNrUHhqQr73l/jNfukkh+qLOC8AXwABABVhd3MtY3J5cHRvLXB1YmxpYy1rZXkAREEvcjEyaFZHY1R5cjJuTDNKbTJ3UFA3R3ZjaytIdi9hekZqbXVUb25Ya3J5SzFBOUlJZDZxZXpSR1NTVnZDUUxoZz09AAEAB2F3cy1rbXMAS2Fybjphd3M6a21zOnVzLWVhc3QtMToyMDA5ODQxMTIzODY6a2V5LzZkODJiMzRlLTM2NjAtNDRlMi04YWJiLTdmMzA1OGJlYTIxMgC4AQIBAHjxYXAO7wQGd+7qxoyvXAajwqboF5FL/9lgYUNJTB8VtAHBP2hwVgw+zypp7GoMNTPAAAAAfjB8BgkqhkiG9w0BBwagbzBtAgEAMGgGCSqGSIb3DQEHATAeBglghkgBZQMEAS4wEQQMx/B25MTgWwpL7CmuAgEQgDtan3orAOKFUfyNm3v6rFcglb+BVVVDV71fj4aRljhpg1ixsYFaKsoej8NcwRktIiWE+mw9XmTEVb6xFQIAABAA9DeLzlRaRQgTcXMJG0iBu/YTyyDKiROD+bU1Y09X9RBz5LA1nWIENJKq2seAhNSB/////wAAAAEAAAAAAAAAAAAAAAEAAAEBExLJ9wI4n7t+wyPEEP4kjYFBdkmNuLLsVC2Yt8mv9Y1iH2G+/g9SaIcdK57pkoW0ECpBxZVOxCuhmK2s74AJCUdem9McjS1waUKyzYTi9vv2ySNBsABIDwT990rE7jZJ3tEZAqcWZg/eWlxvnksFR/akBWZKsKzFz6lF57+cTgdISCEJRV0E7fcUeCuaMaQGK1Qw2OCmIeHEG5j5iztBkZG2IB2CVND/AbxmDUFHwgjsrJPTzaDYSufcGMoZW1A9X1sLVfqNVKvnOFP5tNY7kPF5eAI9FhGBw8SjTqODXz4k6zuqzy9no8HtXowP265U8NZ5VbVTd/zuVEbZyK5KBqzP1sExW4RhnlpXMoOs9WSuAGcwZQIxANTeEwb9V7CacV2Urt/oCqysUzhoV2AcT2ZjryFqY79Tsg+FRpIx7cBizL4ieRzbhQIwcRasNncO5OZOcmVr0MqHv+gCVznndMgjXJmWwUa7h6skJKmhhMPlN0CsugxtVWnD" +} diff --git a/examples/data_masking/src/getting_started_decrypt_data_output.json b/examples/data_masking/src/getting_started_decrypt_data_output.json new file mode 100644 index 00000000000..7871a0416e7 --- 
/dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data_output.json @@ -0,0 +1,18 @@ +{ + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": { + "street": "456 ACME Ave", + "city": "Anytown", + "state": "CA", + "zip": "12345" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/getting_started_decryption_context.py b/examples/data_masking/src/getting_started_decryption_context.py new file mode 100644 index 00000000000..f4b0f6d8ac3 --- /dev/null +++ b/examples/data_masking/src/getting_started_decryption_context.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data = event.get("body", {}) + + logger.info("Decrypting whole object") + + decrypted: dict = data_masker.decrypt( + data, + data_classification="confidential", # (1)! 
+ data_type="customer-data", + tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba", + ) + + return decrypted diff --git a/examples/data_masking/src/getting_started_encrypt_data.py b/examples/data_masking/src/getting_started_encrypt_data.py new file mode 100644 index 00000000000..579170113dd --- /dev/null +++ b/examples/data_masking/src/getting_started_encrypt_data.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) # (1)! +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/src/getting_started_encryption_context.py b/examples/data_masking/src/getting_started_encryption_context.py new file mode 100644 index 00000000000..6fea5dc9f65 --- /dev/null +++ b/examples/data_masking/src/getting_started_encryption_context.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = 
DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> str: + data = event.get("body", {}) + + logger.info("Encrypting whole object") + + encrypted: str = data_masker.encrypt( + data, + data_classification="confidential", # (1)! + data_type="customer-data", + tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba", + ) + + return encrypted diff --git a/examples/data_masking/src/getting_started_erase_data.py b/examples/data_masking/src/getting_started_erase_data.py new file mode 100644 index 00000000000..a3e9fc7217e --- /dev/null +++ b/examples/data_masking/src/getting_started_erase_data.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = Logger() +data_masker = DataMasking() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Erasing fields email, address.street, and company_address") + + erased = data_masker.erase(data, fields=["email", "address.street", "company_address"]) # (1)! 
+ + return erased diff --git a/examples/data_masking/src/getting_started_erase_data_output.json b/examples/data_masking/src/getting_started_erase_data_output.json new file mode 100644 index 00000000000..76a43cc81e7 --- /dev/null +++ b/examples/data_masking/src/getting_started_erase_data_output.json @@ -0,0 +1,13 @@ +{ + "id": 1, + "name": "John Doe", + "age": 30, + "email": "*****", + "address": { + "street": "*****", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": "*****" +} \ No newline at end of file diff --git a/examples/data_masking/src/large_data_input.json b/examples/data_masking/src/large_data_input.json new file mode 100644 index 00000000000..34275c3fa73 --- /dev/null +++ b/examples/data_masking/src/large_data_input.json @@ -0,0 +1,32 @@ +{ + "body": + { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr." + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31" + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat. 
+ Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. + """ + } +} diff --git a/examples/data_masking/src/using_multiple_keys.py b/examples/data_masking/src/using_multiple_keys.py new file mode 100644 index 00000000000..45c49f467d3 --- /dev/null +++ b/examples/data_masking/src/using_multiple_keys.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN_1 = os.getenv("KMS_KEY_ARN_1", "") +KMS_KEY_ARN_2 = os.getenv("KMS_KEY_ARN_2", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN_1, KMS_KEY_ARN_2]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/tests/lambda_mask.py b/examples/data_masking/tests/lambda_mask.py new file mode 100644 index 00000000000..6b2f461e663 --- /dev/null +++ b/examples/data_masking/tests/lambda_mask.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.typing import LambdaContext + +data_masker = DataMasking() + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data = event + + erased = data_masker.erase(data, fields=["testkey"]) + + return erased diff --git 
a/examples/data_masking/tests/test_lambda_mask.py b/examples/data_masking/tests/test_lambda_mask.py new file mode 100644 index 00000000000..596f065b380 --- /dev/null +++ b/examples/data_masking/tests/test_lambda_mask.py @@ -0,0 +1,30 @@ +from dataclasses import dataclass + +import pytest +import test_lambda_mask + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:111111111:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 5 + + return LambdaContext() + + +def test_encrypt_lambda(lambda_context): + # GIVEN: A sample event for testing + event = {"testkey": "testvalue"} + + # WHEN: Invoking the lambda_handler function with the sample event and Lambda context + result = test_lambda_mask.lambda_handler(event, lambda_context) + + # THEN: Assert that the result matches the expected output + assert result == {"testkey": "*****"} diff --git a/mkdocs.yml b/mkdocs.yml index a862430a054..50fe632539c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -29,6 +29,7 @@ nav: - utilities/data_classes.md - utilities/parser.md - utilities/idempotency.md + - utilities/data_masking.md - utilities/feature_flags.md - utilities/streaming.md - utilities/middleware_factory.md diff --git a/mypy.ini b/mypy.ini index cb2d3ce2443..5fcb1533707 100644 --- a/mypy.ini +++ b/mypy.ini @@ -12,12 +12,15 @@ disable_error_code = annotation-unchecked [mypy-jmespath] ignore_missing_imports=True -[mypy-aws_encryption_sdk] +[mypy-aws_encryption_sdk.*] ignore_missing_imports=True [mypy-sentry_sdk] ignore_missing_imports=True +[mypy-jsonpath_ng.*] +ignore_missing_imports=True + [mypy-jmespath.exceptions] ignore_missing_imports=True @@ -71,3 +74,6 @@ ignore_missing_imports = True [mypy-importlib.metadata] ignore_missing_imports = True + +[mypy-ujson] +ignore_missing_imports = True diff 
--git a/poetry.lock b/poetry.lock index b6bf62d37bb..8e7fcad2cd9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -768,58 +768,67 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = 
"cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - 
-[package.dependencies] -cffi = ">=1.12" + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = 
"cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "datadog" -version = "0.47.0" +version = "0.48.0" description = "The Datadog Python library" optional = false python-versions = 
"!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"}, - {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"}, + {file = "datadog-0.48.0-py2.py3-none-any.whl", hash = "sha256:c3f819e2dc632a546a5b4e8d45409e996d4fa18c60df7814c82eda548e0cca59"}, + {file = "datadog-0.48.0.tar.gz", hash = "sha256:d4d661358c3e7f801fbfe15118f5ccf08b9bd9b1f45b8b910605965283edad64"}, ] [package.dependencies] @@ -867,71 +876,71 @@ six = "*" [[package]] name = "ddtrace" -version = "2.4.0" +version = "2.5.2" description = "Datadog APM client library" optional = false python-versions = ">=3.7" files = [ - {file = "ddtrace-2.4.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:90641de597d3424573aa96263509800bb64018727bf74e29e250e6d21200a4be"}, - {file = "ddtrace-2.4.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:75b7d01af5fb8d279a2edb56d48af0dc221ed43f4e5049387e4a9be529217033"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f104933ffbae735887e10e3e0d9a5d28dd7d42d1fd86141c4fa171c07598b561"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675545d2fd7c5be10fe704a3f151add0ce8b101c976ca0ab452699aac0d8489"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b380dabf377a318ebd909423293b02beaa43ffda03ad129a5a93c4a1a4b5c6"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2f93337c1546404967525388a45174481daa72ecf7d3a1e4c21349e1a2d572c"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0e345e034e8962d76642ab2763f5bdb1bc4424c2ea17d9ca5f82e093160d6ca1"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:1aa5e1a7121d08d50795e3f6218f3959cfa55363a3896210410ef354a7573de9"}, - {file = "ddtrace-2.4.0-cp310-cp310-win32.whl", hash = "sha256:d9c69a42919a27cff8d42461b301014d79683c40f60d0cb5f3000e4ff7cb907f"}, - {file = "ddtrace-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:962de6a60f42e2cde1823c47a3383bb0d6beaa954d57b12687688935d0ddd3d3"}, - {file = "ddtrace-2.4.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ed91c32353c8288fb95de67faa341c5ab9a089c0161ad51fc739f0db2b46866e"}, - {file = "ddtrace-2.4.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:410c9b9241ed2514dc9413887d852140cc7ff396b40ffc412835a14668b9b1a3"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639b11f780d0ed1a372a2a6b92cc1b9c586a0fea27439557e768d5ebedabbc34"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08861e4acd61198428f0d994db1bc5d2893ec816b9cd78c0c6d1fc963f0dc771"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad627a4611bff8f527e2c0c0fc51be9d74a563328269f53b871901570ee4ff3"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6ae2f75f2edc068d6c104ceb0e882a6dfad8f702b27384b3dac5290aebbc248"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82a0832000fedcb95856477bab95c6f151fa28ede3aceafaabe7c08beffaa577"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8b1baac10f9cc3803854f802062e02ae5de0d5546f19165c3b6e8840e9b09f4"}, - {file = "ddtrace-2.4.0-cp311-cp311-win32.whl", hash = "sha256:c687fe20b17e2d24de222913dc2383e6b1462641d8ff18d27678dcb72ced82a3"}, - {file = "ddtrace-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:47296b116a97e01fe6bf48a4eea4e825212ee23288ee064964ab87ba608fc038"}, - {file = "ddtrace-2.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6e2b2b4160ea53dd3e4f8bb35af7124a5e8954c8badffa81468c8a62d12acc51"}, 
- {file = "ddtrace-2.4.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:49ac0d69f98a4ff2175db39481598300fd94f038a027b537d0a66d9dbeca1ed7"}, - {file = "ddtrace-2.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2604e1c91b44d3b6fb15d0337cda1ac2c15aec215f6a44e1bb39d25b47c2633c"}, - {file = "ddtrace-2.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb7d2c846e3d7e8156199855d4db014a71d62daedba84a213416e2a488e834b3"}, - {file = "ddtrace-2.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85774e12d5d92152cd1c64f3a8a2f4dbe7f3d39201f8a8ff5e914b9639fe6e17"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:418c0c990c505accc8665bfc056f4297938a54176157bf1f0765f2fae584efec"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:183f7c3ddd9a2891bd1b6f5ea3af6d16517775268b3940259820ca3c83292d16"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eb90e71b70e3ea6c24711cfb5c48c711a2175c315daf07f4f28903aa773a48b7"}, - {file = "ddtrace-2.4.0-cp312-cp312-win32.whl", hash = "sha256:5eab75f1d4170c41de1f9c32e7e39714b2dd11a59d9ff7e94a199b88fa813ecd"}, - {file = "ddtrace-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:d892e0b71f3b6bcf31920b5e7fd699c86aea734bc02eec3c1b22acd8f63057e4"}, - {file = "ddtrace-2.4.0-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c07ea7a17a2897d891ee5e95de3b0e4f57184c471e87ffcc7208b3ccd68b9fcc"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05b28815e65d6361cd056c877ab051e132a6929b0d353313a499122e6522ea3"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63719bfc8fe5e8510022a3275145d6b2b1c4f955c395698fb792d99d4cda698d"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:190f96eccdd8107cc93db6e79af4b8fc9403418c823d895af898cf635f5cada6"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b0fdb6a2fe0eadd122df4ea3a11690cb88f4f642bd19b1a21d01e9dcfd6eb20c"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1b2bf18ee10ea8fe668096a6c70db4161e228edee161b04719506947d7117937"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ca5fa396b8df0d7b55ad9e8d5b19be09c5dedefa388bf7590340ace5ce392e14"}, - {file = "ddtrace-2.4.0-cp37-cp37m-win32.whl", hash = "sha256:c67a4d8767aa269f8dfab79ae39b8170b95de6813bd1cba17dc951f0a1ee462b"}, - {file = "ddtrace-2.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1db7931541052622a91c8c6594b274d96efe956d5dbbe09c57a50c0f74640b52"}, - {file = "ddtrace-2.4.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8b6ab9f26d2ea50dfa69a282d727c865461f0c1b535f973922072f700cde031"}, - {file = "ddtrace-2.4.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:9ad7aa89988b77b893c3e9898fc48e3cef9471bc2648d6a83cc800b49cad1f1f"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38b95920bcc17289a0e3871830ef19030df763039021a796a1debb7fd4ea347b"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9152dcc4b8a98392ce5853b8e160f8d215ddd148337d42861ab3c12635b32b75"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c335be0ab8f4f376f51111219a9d85bcdbd6d75c18a8d5471817645bed1430c0"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c95339694034d4fbf9e1b2a0918f99b3936336e8deb4d513e9cf7a6ae1532f3"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f8bddc5e84e50663b64fbad2e2c61203484dea06de7759a47f096514d99f5c8f"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:0af7c4c94959481bc4060c7dfb5f7e70b1929b18089c7ea0329fc3f28707fd8a"}, - {file = "ddtrace-2.4.0-cp38-cp38-win32.whl", hash = "sha256:de3fcca4747340c835e7816009dd363d4e02dc5fc25365b2418dc3d986a6550a"}, - {file = "ddtrace-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:2f3dbcff2b305d34ecc63db05d0efeb923846ba07871be6f0a3509a33290fb69"}, - {file = "ddtrace-2.4.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7b43e2e890e868a133afc25f57774bb6bc8ae8841094cba4e8f2b3ee50f9c7ee"}, - {file = "ddtrace-2.4.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:de66ea03ca5b3f02d0f878fc9d486d4d4f654cf66b38d3fdf73bf314fc0e3f5b"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01cba8d20d4754135411e0e3398af02bc29b3c5f3dc85b1ee8cdfb9a0532f793"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb324809582b65baa682f045cb2873d686de3aa93cac75718462d0a23f980836"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f475ea4e2086e6a16a48568688918b21043ba391a6f968cb9bc17ec70d51de75"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1d4a5d9c89db2cc0e4a6eaf10b6d1af449d1ef14060000b23eceee19497705e"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a057db38d52271b6206bac2ab23f2a36cbe547397cba1ce586021df711570559"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:45ee78018276764f7fdaf1cf3b945660cf1ab39e1a03e0c61bf1984a71562204"}, - {file = "ddtrace-2.4.0-cp39-cp39-win32.whl", hash = "sha256:4f63dea207c90bb2c2d52ff9de0ee71b27aedb5d8540745e4e0b38a896737de0"}, - {file = "ddtrace-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3523c71d37fb3135d0817e92b486bcee7829c41e5465ed41b080286d7e2739d"}, - {file = "ddtrace-2.4.0.tar.gz", hash = "sha256:fb1bab23debb3a1fb71e3d6a1ce9818bc5e6ad9b885b901f78f3f28639393ecb"}, + {file = 
"ddtrace-2.5.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:f918538a6adb33696be653d343ee318b16ea977376d9b7214d14fe97c42e9bd9"}, + {file = "ddtrace-2.5.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f56735eb636d3ab2f7224f261d3a6bd43f884e9901d68407d485ea65f3dc0f46"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72d21fe6842a8d80c8765dd699153a2475ae2d49e82e10f9668eadb08b454040"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6e48caf63506d7ac3df7caa955b6258de91c1a1f55149506ab8ac36143770b9"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3f26e04ba7521f6885d871fd6266fedc0a7ccf2637b85579c058927404bad7"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:15d78b0cd5d2090c063031d76e933b8b24e043d524a6091a751cf57b0fab025f"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee76beaf87695f2204b0c2c2a3664b39f3483b7a8447b28e5e2bcc899861b3eb"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8840f0e82d6dca3888bd06e7ab0ca6d39009f3cd3475028d8bc03c939127afc2"}, + {file = "ddtrace-2.5.2-cp310-cp310-win32.whl", hash = "sha256:a34ccab0c8991c5fc5252d5cd6e88852cd7f77c8bf838de84e70b4a3bfacaad4"}, + {file = "ddtrace-2.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:ffa4f5779c7000fe5960156bd15339184355b30a661b0955799cae50da5d03a7"}, + {file = "ddtrace-2.5.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ea2740a3d61876cb07b271af444e98cdc8b730497cfcddbc3794c7a7441b8d15"}, + {file = "ddtrace-2.5.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:62e775ba9d2a2b5f952a6609029e965057bdd852ccd6e53b55c0f82ae83aa542"}, + {file = "ddtrace-2.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30186112f156a564efda5e2018240b55baee7664897ca5fc35c452d032a77185"}, + {file = 
"ddtrace-2.5.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9dccdc69de364cffc2b892280724c78cb54db151452a0b6d1b4a89b6f060c44"}, + {file = "ddtrace-2.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa2543c2303ab325af7794f2a8a420133cd9222e70bfbce3869da146fc5e2ba"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aa2e64f79ada9f2fd5307cd0eba726d8585e47b0282fb9463aaa4b267265e94a"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:37b4d55a5be59530e6e5761a36d727aee812be69c81b00ee0182eb62be6f3b75"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d97f990d2322a23e82203cc5a2aa694fb0d42541a44bb120390e6598a63e5f5"}, + {file = "ddtrace-2.5.2-cp311-cp311-win32.whl", hash = "sha256:5d3f1bc3ce87fbcf2256197178179ef681df720ebbc39b0559bda00247744533"}, + {file = "ddtrace-2.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:a50057085b0972e695bb1ef3042f6cd6a1a3b12111fac4985942f2dbbcf8ac2f"}, + {file = "ddtrace-2.5.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b923b099b9a1e50f01ce8bcd4d11e3255a48c71f3e6314dd9a482baed0a87ed6"}, + {file = "ddtrace-2.5.2-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:512d3975b1657c706ca9c84373e5fce323f6fc94bfac33c30876ad8d55e0ea71"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c54bc474c70151d5a141061b6c20a1efabdf458e4239c790d45fa12a13b8e7d"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5fb2bbd38dc46ba6a7ea1031c4751b1ca888be5fac8a42049ebc2517707c00d"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa6fb6bcfb3810d8f0882e489e7d2ef4dd3a92b452cfdd8d1fd4703dc496b17"}, + {file = "ddtrace-2.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f4eed40d978352c7371804ecb68bbe9e55967bb904bd03b0568554e0b6b92cf"}, + {file = 
"ddtrace-2.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:57606af5380888e2e7cc67b7c4fa5e1bc51d29c48f004b4be0cbe1b319fddc75"}, + {file = "ddtrace-2.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee8d0259a004964a8eddb394aa84a5754435d4270cd2041e6559c9e68fa49141"}, + {file = "ddtrace-2.5.2-cp312-cp312-win32.whl", hash = "sha256:4df564e620ec7e657fcdb0d5bf1231aa1357bf49b736f0d9e9f6df17a23fc569"}, + {file = "ddtrace-2.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:637f16af1c84566bde044798312c67bc5676df949632ab02e740440558f2a598"}, + {file = "ddtrace-2.5.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:d24841a9390f3e169edcaf1ca5ac80599062e66dee43a510decb25e779b6f7b4"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49aa4e0210862e829e09569de2e2f34ac17c5e246567c5b6662ec21e2a06d938"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:985738fe875b11f05dfa2b1f21a619d499344eb740f63e01d6eae1fb29eb949b"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8814321822e4afc95ac86fbc476dc20d78dd4b1d510c02606459df4580093d18"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ad6c0ae7baff9d00c689834aec0627274d681ed1d2a8ae627348a6191e8d32ec"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa596f2e80c525a2310e605bfa3fa6ba6790b2ae90c02e47ceee0e62ceae17a6"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6bdfae9fa03af334820678196a4895450d0b6bd9f1b5119d42ddbd327a55fcce"}, + {file = "ddtrace-2.5.2-cp37-cp37m-win32.whl", hash = "sha256:227bb0391d310e0d5a54505c7ab59f9692a5db91dc492373489bc45726980e1d"}, + {file = "ddtrace-2.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6e55c4738b58b4452933204305243e19000f6f283af93bf51b63382100cb8f21"}, + {file = "ddtrace-2.5.2-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:4d9e7a9e26c38ae1e368f5d820e78459ff2d39689f40d4a3db185ddb3686c383"}, + {file = "ddtrace-2.5.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c361ea11b442b04d8e011528205ed65b926d71d18f38d372270204eabf49b068"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aafd86eeea622cd0e8cf6b63632efc67a52a32317d2a376382ef6170d383c9f"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ff039635470ba483ed448baaf6337d85a731b17af62fef06dfa811f761f374f"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f1cb3bea1170410d603f9d557918c24d4d8783659c03817daea6352d9f37f9"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7351500241eb24c7d789b371a6860ca2b0e2db1ff9d317089153b562a3a461e1"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a2cfc6ee800890556e404b94d13680c83952efa5d3dafa72ef8cb08a8782f874"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96a791f03b62ebdb9f3e635a0e93711149123a8fc1c1c152be0d1cdb5d8e6359"}, + {file = "ddtrace-2.5.2-cp38-cp38-win32.whl", hash = "sha256:6c61e72abec3f2f6b46e53712a32a971de1b6a9be657d5ebeff1334f6146babc"}, + {file = "ddtrace-2.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:b93d8b536f5fc45a72bb2785051dc729f4d581ef2d69ed10bccae6a7487477b2"}, + {file = "ddtrace-2.5.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:38cbcb7b4ff1371480b29228d2b8e570e7d7b386a7632b96f9600135ec3eb9db"}, + {file = "ddtrace-2.5.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a270d128c6067f52a76ecbb658fae3f4d3bd4888baa9e6159ff82b6de14c53be"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e59f3958016fcec5eb16abd7979a9ec4d850733e2a03b878b096277fc092784"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:066403f0e00a8de09c8187037befe7463d1fab5d8178b62a07c2542792710d14"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbcbf24bca8497f1412ec438fbdc94847aef9e86092ffd4f8626bbe6d278d33"}, + {file = "ddtrace-2.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d34f8da809e2783770a6c88396b3653fb12a4196e9b5f16b8c10f37bbf2b7b31"}, + {file = "ddtrace-2.5.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9eaca41664dd0c2bd7257fe2e91c7e46718b20591bfaa0b5c01c39b599115f88"}, + {file = "ddtrace-2.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f4b67e02ba5c316711719dcfc15e94f47684e7af1785289d016a29a2c664827"}, + {file = "ddtrace-2.5.2-cp39-cp39-win32.whl", hash = "sha256:9bbd675d73aae6516e02a86cb830778771dafb0e182d5a122270ccd82ee77eed"}, + {file = "ddtrace-2.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:e93f3f5d3d57beb492b04286c758be65495908bd313df6f56865ad7af222e49e"}, + {file = "ddtrace-2.5.2.tar.gz", hash = "sha256:5addeb19eea5ebdc23c493e5635f4c8737795b48ba637117a1895f31b900985f"}, ] [package.dependencies] @@ -943,7 +952,7 @@ bytecode = [ cattrs = "*" ddsketch = ">=2.0.1" envier = "*" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = "<=6.5.0", markers = "python_version < \"3.8\""} opentelemetry-api = ">=1" protobuf = ">=3" setuptools = {version = "*", markers = "python_version >= \"3.12\""} @@ -1019,13 +1028,13 @@ ssh = ["paramiko (>=2.4.3)"] [[package]] name = "envier" -version = "0.5.0" +version = "0.5.1" description = "Python application configuration via the environment" optional = false python-versions = ">=3.7" files = [ - {file = "envier-0.5.0-py3-none-any.whl", hash = "sha256:5fed6099ee5d7ad4cf664f8bb99d1281d4ab5fadeec8f40ba9458610938293be"}, - {file = "envier-0.5.0.tar.gz", hash = "sha256:f35ca8605f0c70c2c0367133af9dc1ef16710021dbd0e28c1b0a83070db06768"}, + {file = "envier-0.5.1-py3-none-any.whl", hash = 
"sha256:b45ef6051fea33d0c32a64e186bff2cfb446e2242d6781216c9bc9ce708c5909"}, + {file = "envier-0.5.1.tar.gz", hash = "sha256:bd5ccf707447973ea0f4125b7df202ba415ad888bcdcb8df80e0b002ee11ffdb"}, ] [package.extras] @@ -1250,17 +1259,6 @@ files = [ {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, - {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, - {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, @@ -1321,13 +1319,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.7.0" +version = "6.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, + {file = "importlib_metadata-6.5.0-py3-none-any.whl", hash = "sha256:03ba783c3a2c69d751b109fc0c94a62c51f581b3d6acf8ed1331b6d5729321ff"}, + {file = "importlib_metadata-6.5.0.tar.gz", hash = "sha256:7a8bdf1bc3a726297f5cfbc999e6e7ff6b4fa41b26bba4afc580448624460045"}, ] [package.dependencies] @@ -1337,7 +1335,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", 
"pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -1463,6 +1461,20 @@ files = [ [package.dependencies] jsonpointer = ">=1.9" +[[package]] +name = "jsonpath-ng" +version = "1.6.1" +description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." +optional = true +python-versions = "*" +files = [ + {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, + {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, +] + +[package.dependencies] +ply = "*" + [[package]] name = "jsonpickle" version = "3.0.2" @@ -1612,71 +1624,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = 
"MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = 
"MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -2171,6 +2183,17 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +optional = true +python-versions = "*" +files = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] + [[package]] name = "protobuf" version = "4.24.4" @@ -2505,13 +2528,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", 
hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] @@ -2549,7 +2572,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2557,16 +2579,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ 
-2583,7 +2597,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2591,7 +2604,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = 
"PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3144,6 +3156,20 @@ files = [ [package.dependencies] types-urllib3 = "*" +[[package]] +name = "types-requests" +version = "2.31.0.20231231" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-requests-2.31.0.20231231.tar.gz", hash = "sha256:0f8c0c9764773384122813548d9eea92a5c4e1f33ed54556b508968ec5065cee"}, + {file = "types_requests-2.31.0.20231231-py3-none-any.whl", hash = "sha256:2e2230c7bc8dd63fa3153c1c0ae335f8a368447f0582fc332f17d54f88e69027"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "types-urllib3" version = "1.26.25.14" @@ -3390,10 +3416,10 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] +all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "jsonpath-ng", "pydantic"] aws-sdk = ["boto3"] datadog = ["datadog-lambda"] -datamasking-aws-sdk = ["aws-encryption-sdk"] +datamasking = ["aws-encryption-sdk", "jsonpath-ng"] parser = ["pydantic"] redis = ["redis"] tracer = ["aws-xray-sdk"] @@ -3402,4 +3428,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "f4c66a8fa656902aba0c04cc8b5dc236d7f0ed6f7c3e22507cc89e711b0b62b2" +content-hash = "28c3a405185f635f8e65ea51adfe1cfc589cb469497d800100521f91037ba26a" diff --git a/pyproject.toml b/pyproject.toml index 0e576d412df..cb1f322e9ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ redis = {version = ">=4.4,<6.0", optional = true} typing-extensions = "^4.6.2" datadog-lambda = { version = ">=4.77,<6.0", 
optional = true } aws-encryption-sdk = { version = "^3.1.1", optional = true } +jsonpath-ng = { version = "^1.6.0", optional = true } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.2"} @@ -97,11 +98,11 @@ parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] redis = ["redis"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema", "aws-encryption-sdk", "jsonpath-ng"] # allow customers to run code locally without emulators (SAM CLI, etc.) aws-sdk = ["boto3"] datadog = ["datadog-lambda"] -datamasking-aws-sdk = ["aws-encryption-sdk"] +datamasking = ["aws-encryption-sdk", "jsonpath-ng"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.83.8" diff --git a/tests/e2e/data_masking/handlers/basic_handler.py b/tests/e2e/data_masking/handlers/basic_handler.py index f31e822429a..6f696391822 100644 --- a/tests/e2e/data_masking/handlers/basic_handler.py +++ b/tests/e2e/data_masking/handlers/basic_handler.py @@ -1,6 +1,6 @@ from aws_lambda_powertools import Logger -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider logger = Logger() @@ -14,7 +14,7 @@ def lambda_handler(event, context): # Encrypting data for test_encryption_in_handler test kms_key = event.get("kms_key", "") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key])) value = [1, 2, "string", 4.5] encrypted_data = data_masker.encrypt(value) response = {} diff --git a/tests/e2e/data_masking/test_e2e_data_masking.py b/tests/e2e/data_masking/test_e2e_data_masking.py index 80f45564177..a720a265d83 100644 
--- a/tests/e2e/data_masking/test_e2e_data_masking.py +++ b/tests/e2e/data_masking/test_e2e_data_masking.py @@ -4,15 +4,13 @@ import pytest from aws_encryption_sdk.exceptions import DecryptKeyError -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import ( - AwsEncryptionSdkProvider, - ContextMismatchError, +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.exceptions import DataMaskingContextMismatchError +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, ) from tests.e2e.utils import data_fetcher -pytest.skip(reason="Data masking tests disabled until we go GA.", allow_module_level=True) - @pytest.fixture def basic_handler_fn(infrastructure: dict) -> str: @@ -36,7 +34,7 @@ def kms_key2_arn(infrastructure: dict) -> str: @pytest.fixture def data_masker(kms_key1_arn) -> DataMasking: - return DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key1_arn])) + return DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key1_arn])) @pytest.mark.xdist_group(name="data_masking") @@ -79,7 +77,7 @@ def test_encryption_context_mismatch(data_masker): encrypted_data = data_masker.encrypt(value, encryption_context={"this": "is_secure"}) # THEN decrypting with a different encryption_context should raise a ContextMismatchError - with pytest.raises(ContextMismatchError): + with pytest.raises(DataMaskingContextMismatchError): data_masker.decrypt(encrypted_data, encryption_context={"not": "same_context"}) @@ -93,7 +91,7 @@ def test_encryption_no_context_fail(data_masker): encrypted_data = data_masker.encrypt(value) # THEN decrypting with an encryption_context should raise a ContextMismatchError - with pytest.raises(ContextMismatchError): + with pytest.raises(DataMaskingContextMismatchError): data_masker.decrypt(encrypted_data, 
encryption_context={"this": "is_secure"}) @@ -106,7 +104,7 @@ def test_encryption_decryption_key_mismatch(data_masker, kms_key2_arn): encrypted_data = data_masker.encrypt(value) # THEN when decrypting with a different key it should fail - data_masker_key2 = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key2_arn])) + data_masker_key2 = DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key2_arn])) with pytest.raises(DecryptKeyError): data_masker_key2.decrypt(encrypted_data) diff --git a/tests/functional/data_masking/test_aws_encryption_sdk.py b/tests/functional/data_masking/test_aws_encryption_sdk.py index 978c2e21572..c1dfd22c6b9 100644 --- a/tests/functional/data_masking/test_aws_encryption_sdk.py +++ b/tests/functional/data_masking/test_aws_encryption_sdk.py @@ -1,34 +1,36 @@ from __future__ import annotations import base64 +import functools import json -from typing import Any, Callable, Dict, Union +from typing import Any, Callable import pytest +from aws_encryption_sdk.identifiers import Algorithm -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING -from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider -from aws_lambda_powertools.utilities._data_masking.provider.kms import ( - AwsEncryptionSdkProvider, +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider +from aws_lambda_powertools.utilities.data_masking.provider.kms import ( + AWSEncryptionSDKProvider, ) class FakeEncryptionKeyProvider(BaseProvider): def __init__( self, - json_serializer: Callable[[Dict], str] | None = None, - json_deserializer: Callable[[Union[Dict, str, bool, int, float]], str] | None = None, - ): - super().__init__(json_serializer=json_serializer, 
json_deserializer=json_deserializer) + json_serializer: Callable = functools.partial(json.dumps, ensure_ascii=False), + json_deserializer: Callable = json.loads, + ) -> None: + super().__init__(json_serializer, json_deserializer) def encrypt(self, data: bytes | str, **kwargs) -> str: - data = self.json_serializer(data) - ciphertext = base64.b64encode(data).decode() + encoded_data: str = self.json_serializer(data) + ciphertext = base64.b64encode(encoded_data.encode("utf-8")).decode() return ciphertext def decrypt(self, data: bytes, **kwargs) -> Any: - ciphertext_decoded = base64.b64decode(data) + ciphertext_decoded = base64.b64decode(data).decode("utf-8") ciphertext = self.json_deserializer(ciphertext_decoded) return ciphertext @@ -37,74 +39,74 @@ def decrypt(self, data: bytes, **kwargs) -> Any: def data_masker(monkeypatch) -> DataMasking: """DataMasking using AWS Encryption SDK Provider with a fake client""" fake_key_provider = FakeEncryptionKeyProvider() - provider = AwsEncryptionSdkProvider( + provider = AWSEncryptionSDKProvider( keys=["dummy"], key_provider=fake_key_provider, ) return DataMasking(provider=provider) -def test_mask_int(data_masker): +def test_erase_int(data_masker): # GIVEN an int data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(42) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(42) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_float(data_masker): +def test_erase_float(data_masker): # GIVEN a float data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(4.2) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(4.2) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_bool(data_masker): +def 
test_erase_bool(data_masker): # GIVEN a bool data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(True) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(True) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_none(data_masker): +def test_erase_none(data_masker): # GIVEN a None data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(None) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(None) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_str(data_masker): +def test_erase_str(data_masker): # GIVEN a str data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask("this is a string") + # WHEN erase is called with no fields argument + erased_string = data_masker.erase("this is a string") # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_list(data_masker): +def test_erase_list(data_masker): # GIVEN a list data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask([1, 2, "string", 3]) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase([1, 2, "string", 3]) # THEN the result is the data masked, while maintaining type list - assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] + assert erased_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] -def test_mask_dict(data_masker): +def test_erase_dict(data_masker): # GIVEN a dict data type data = { "a": { @@ -113,14 +115,14 @@ def test_mask_dict(data_masker): }, } - # WHEN mask is called with no fields 
argument - masked_string = data_masker.mask(data) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(data) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_dict_with_fields(data_masker): +def test_erase_dict_with_fields(data_masker): # GIVEN a dict data type data = { "a": { @@ -129,11 +131,11 @@ def test_mask_dict_with_fields(data_masker): }, } - # WHEN mask is called with a list of fields specified - masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + erased_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) # THEN the result is only the specified fields are masked - assert masked_string == { + assert erased_string == { "a": { "1": {"None": DATA_MASKING_STRING, "four": "world"}, "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, @@ -141,7 +143,7 @@ def test_mask_dict_with_fields(data_masker): } -def test_mask_json_dict_with_fields(data_masker): +def test_erase_json_dict_with_fields(data_masker): # GIVEN the data type is a json representation of a dictionary data = json.dumps( { @@ -152,8 +154,8 @@ def test_mask_json_dict_with_fields(data_masker): }, ) - # WHEN mask is called with a list of fields specified - masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + masked_json_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) # THEN the result is only the specified fields are masked assert masked_json_string == { @@ -257,8 +259,8 @@ def test_encrypt_dict_with_fields(data_masker): } # WHEN encrypting and then decrypting the encrypted data - encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) - decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + encrypted_data = data_masker.encrypt(data) + 
decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is only the specified fields are masked assert decrypted_data == data @@ -276,8 +278,199 @@ def test_encrypt_json_dict_with_fields(data_masker): ) # WHEN encrypting and then decrypting the encrypted data - encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) - decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is only the specified fields are masked - assert decrypted_data == json.loads(data) + assert decrypted_data == data + + +def test_encrypt_json_with_list_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": [{"key1": [0, 1]}], + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_json_with_tuple_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_encryption_context(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + # WHEN encrypting and then decrypting the 
encrypted data + encrypted_data = data_masker.encrypt(data, data_classification="confidential") + decrypted_data = data_masker.decrypt(encrypted_data, data_classification="confidential") + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_complex_dict(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_slice(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = 
data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_complex_search(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_provider_options(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + provider_options = {"algorithm": Algorithm.AES_256_GCM_HKDF_SHA512_COMMIT_KEY} + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data, provider_options=provider_options) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py index 9a898ea10cd..76081b20392 100644 --- 
a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function1024(): logger.info("Hello world function1024 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_1024": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py index 6b8250579a5..b191ade241a 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from 
aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function128(): logger.info("Hello world function128 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_128": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py index 623a1f7b232..19d287e6011 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from 
aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function1769(): logger.info("Hello world function1769 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_1769": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml index b70fb6d061e..7df194d80bb 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml @@ -38,7 +38,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: @@ -68,7 +70,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: @@ -98,7 +102,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: diff --git a/tests/performance/data_masking/test_perf_data_masking.py b/tests/performance/data_masking/test_perf_data_masking.py index 688e36c7a64..668da32a6e9 100644 --- a/tests/performance/data_masking/test_perf_data_masking.py +++ 
b/tests/performance/data_masking/test_perf_data_masking.py @@ -3,11 +3,11 @@ import pytest -from aws_lambda_powertools.utilities._data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.base import DataMasking -DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities._data_masking" +DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities.data_masking" DATA_MASKING_INIT_SLA: float = 0.002 -DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.001 +DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.05 json_blob = { "id": 1, @@ -55,15 +55,15 @@ def test_data_masking_init(benchmark): pytest.fail(f"High level imports should be below {DATA_MASKING_INIT_SLA}s: {stat}") -def mask_json_blob(): +def erase_json_blob(): data_masker = DataMasking() - data_masker.mask(json_blob, json_blob_fields) + data_masker.erase(json_blob, json_blob_fields) @pytest.mark.perf @pytest.mark.benchmark(group="core", disable_gc=True, warmup=False) def test_data_masking_encrypt_with_json_blob(benchmark): - benchmark.pedantic(mask_json_blob) + benchmark.pedantic(erase_json_blob) stat = benchmark.stats.stats.max if stat > DATA_MASKING_NESTED_ENCRYPT_SLA: pytest.fail(f"High level imports should be below {DATA_MASKING_NESTED_ENCRYPT_SLA}s: {stat}") diff --git a/tests/unit/data_masking/test_kms_provider.py b/tests/unit/data_masking/test_kms_provider.py new file mode 100644 index 00000000000..5fe9b2e53ed --- /dev/null +++ b/tests/unit/data_masking/test_kms_provider.py @@ -0,0 +1,42 @@ +import pytest + +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingContextMismatchError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + KMSKeyProvider, +) + + +def test_encryption_context_exact_match(): + ctx = {"data_classification": "confidential", "data_type": "customer_data"} + ctx_two = {"data_type": "customer_data", "data_classification": "confidential"} + + 
KMSKeyProvider._compare_encryption_context(ctx, ctx_two) + + +def test_encryption_context_partial_match(): + ctx = {"data_classification": "confidential", "data_type": "customer_data"} + ctx_two = {"data_type": "customer_data"} + + with pytest.raises(DataMaskingContextMismatchError): + KMSKeyProvider._compare_encryption_context(ctx, ctx_two) + + +def test_encryption_context_supported_values(): + ctx = {"a": "b", "c": "d"} + KMSKeyProvider._validate_encryption_context(ctx) + KMSKeyProvider._validate_encryption_context({}) + + +@pytest.mark.parametrize( + "ctx", + [ + pytest.param({"a": 10, "b": True, "c": []}, id="non_string_values"), + pytest.param({"a": {"b": "c"}}, id="nested_dict"), + ], +) +def test_encryption_context_non_str_validation(ctx): + with pytest.raises(DataMaskingUnsupportedTypeError): + KMSKeyProvider._validate_encryption_context(ctx) diff --git a/tests/unit/data_masking/test_unit_data_masking.py b/tests/unit/data_masking/test_unit_data_masking.py index 4a92a668d73..4fbbc188ceb 100644 --- a/tests/unit/data_masking/test_unit_data_masking.py +++ b/tests/unit/data_masking/test_unit_data_masking.py @@ -2,8 +2,12 @@ import pytest -from aws_lambda_powertools.utilities._data_masking.base import DataMasking -from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingFieldNotFoundError, + DataMaskingUnsupportedTypeError, +) @pytest.fixture @@ -11,67 +15,67 @@ def data_masker() -> DataMasking: return DataMasking() -def test_mask_int(data_masker): +def test_erase_int(data_masker): # GIVEN an int data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(42) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(42) # THEN the 
result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_float(data_masker): +def test_erase_float(data_masker): # GIVEN a float data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(4.2) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(4.2) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_bool(data_masker): +def test_erase_bool(data_masker): # GIVEN a bool data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(True) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(True) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_none(data_masker): +def test_erase_none(data_masker): # GIVEN a None data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(None) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(None) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_str(data_masker): +def test_erase_str(data_masker): # GIVEN a str data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask("this is a string") + # WHEN erase is called with no fields argument + erased_string = data_masker.erase("this is a string") # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_list(data_masker): +def test_erase_list(data_masker): # GIVEN a list data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask([1, 2, "string", 3]) + # WHEN erase is called with no fields 
argument + erased_string = data_masker.erase([1, 2, "string", 3]) # THEN the result is the data masked, while maintaining type list - assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] + assert erased_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] -def test_mask_dict(data_masker): +def test_erase_dict(data_masker): # GIVEN a dict data type data = { "a": { @@ -80,14 +84,14 @@ def test_mask_dict(data_masker): }, } - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(data) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(data) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_dict_with_fields(data_masker): +def test_erase_dict_with_fields(data_masker): # GIVEN a dict data type data = { "a": { @@ -96,11 +100,11 @@ def test_mask_dict_with_fields(data_masker): }, } - # WHEN mask is called with a list of fields specified - masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + erased_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) - # THEN the result is only the specified fields are masked - assert masked_string == { + # THEN the result is only the specified fields are erased + assert erased_string == { "a": { "1": {"None": DATA_MASKING_STRING, "four": "world"}, "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, @@ -108,7 +112,7 @@ def test_mask_dict_with_fields(data_masker): } -def test_mask_json_dict_with_fields(data_masker): +def test_erase_json_dict_with_fields(data_masker): # GIVEN the data type is a json representation of a dictionary data = json.dumps( { @@ -119,10 +123,10 @@ def test_mask_json_dict_with_fields(data_masker): }, ) - # WHEN mask is called with a list of fields specified - masked_json_string = 
data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + masked_json_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) - # THEN the result is only the specified fields are masked + # THEN the result is only the specified fields are erased assert masked_json_string == { "a": { "1": {"None": DATA_MASKING_STRING, "four": "world"}, @@ -153,13 +157,24 @@ def test_parsing_unsupported_data_type(data_masker): # GIVEN an initialization of the DataMasking class # WHEN attempting to pass in a list of fields with input data that is not a dict - with pytest.raises(TypeError): + with pytest.raises(DataMaskingUnsupportedTypeError): # THEN the result is a TypeError - data_masker.mask(42, ["this.field"]) + data_masker.erase(42, ["this.field"]) -def test_parsing_nonexistent_fields(data_masker): +def test_parsing_with_empty_field(data_masker): + # GIVEN an initialization of the DataMasking class + + # WHEN attempting to pass in a list of fields with input data that is not a dict + with pytest.raises(ValueError): + # THEN the result is a TypeError + data_masker.erase(42, []) + + +def test_parsing_nonexistent_fields_with_raise_on_missing_field(): # GIVEN a dict data type + + data_masker = DataMasking(raise_on_missing_field=True) data = { "3": { "1": {"None": "hello", "four": "world"}, @@ -168,13 +183,15 @@ def test_parsing_nonexistent_fields(data_masker): } # WHEN attempting to pass in fields that do not exist in the input data - with pytest.raises(KeyError): + with pytest.raises(DataMaskingFieldNotFoundError): # THEN the result is a KeyError - data_masker.mask(data, ["3.1.True"]) + data_masker.erase(data, ["'3'..True"]) -def test_parsing_nonstring_fields(data_masker): +def test_parsing_nonexistent_fields_warning_on_missing_field(): # GIVEN a dict data type + + data_masker = DataMasking(raise_on_missing_field=False) data = { "3": { "1": {"None": "hello", "four": "world"}, @@ -182,24 +199,9 @@ def 
test_parsing_nonstring_fields(data_masker): }, } - # WHEN attempting to pass in a list of fields that are not strings - masked = data_masker.mask(data, fields=[3.4]) - - # THEN the result is the value of the nested field should be masked as normal - assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}} - - -def test_parsing_nonstring_keys_and_fields(data_masker): - # GIVEN a dict data type with integer keys - data = { - 3: { - "1": {"None": "hello", "four": "world"}, - 4: {"33": {"5": "goodbye", "e": "world"}}, - }, - } - - # WHEN masked with a list of fields that are integer keys - masked = data_masker.mask(data, fields=[3.4]) + # WHEN erase is called with a non-existing field + with pytest.warns(UserWarning, match="Field or expression*"): + masked_json_string = data_masker.erase(data, fields=["non-existing"]) - # THEN the result is the value of the nested field should be masked - assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}} + # THEN the "erased" payload is the same of the original + assert masked_json_string == data From 33820d12921aa8491588aa6a281a45066073e767 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Thu, 1 Feb 2024 16:44:47 +0000 Subject: [PATCH 28/32] feat(event_handler): support Header parameter validation in OpenAPI schema (#3687) * Adding header - Initial commit * Adding header - Fix VPC Lattice Payload * Adding header - tests and final changes * Making sonarqube happy * Adding documentation * Rafactoring to be complaint with RFC * Adding tests * Adding test with Uppercase variables * Revert event changes * Adding HTTP RFC * Adding getter/setter to clean the code * Adding getter/setter to clean the code * Addressing Ruben's feedback --- .../middlewares/openapi_validation.py | 48 +- .../event_handler/openapi/dependant.py | 27 +- .../event_handler/openapi/params.py | 79 +- .../utilities/data_classes/alb_event.py | 11 + .../data_classes/api_gateway_proxy_event.py 
| 19 + .../data_classes/bedrock_agent_event.py | 6 +- .../utilities/data_classes/common.py | 15 + .../utilities/data_classes/vpc_lattice.py | 15 + docs/core/event_handler/api_gateway.md | 40 +- .../src/validating_headers.py | 39 + .../src/working_with_headers_multi_value.py | 34 + .../events/albMultiValueQueryStringEvent.json | 7 + tests/events/apiGatewayProxyEvent.json | 2 +- .../lambdaFunctionUrlEventWithHeaders.json | 4 +- tests/events/vpcLatticeEvent.json | 4 +- .../events/vpcLatticeV2EventWithHeaders.json | 31 +- .../event_handler/test_openapi_params.py | 6 +- .../test_openapi_validation_middleware.py | 699 +++++++++++++----- 18 files changed, 873 insertions(+), 213 deletions(-) create mode 100644 examples/event_handler_rest/src/validating_headers.py create mode 100644 examples/event_handler_rest/src/working_with_headers_multi_value.py diff --git a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py index fd7507603de..54c48189282 100644 --- a/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py +++ b/aws_lambda_powertools/event_handler/middlewares/openapi_validation.py @@ -81,9 +81,22 @@ def handler(self, app: EventHandlerInstance, next_middleware: NextMiddleware) -> query_string, ) + # Normalize header values before validate this + headers = _normalize_multi_header_values_with_param( + app.current_event.resolved_headers_field, + route.dependant.header_params, + ) + + # Process header values + header_values, header_errors = _request_params_to_args( + route.dependant.header_params, + headers, + ) + values.update(path_values) values.update(query_values) - errors += path_errors + query_errors + values.update(header_values) + errors += path_errors + query_errors + header_errors # Process the request body, if it exists if route.dependant.body_params: @@ -243,12 +256,14 @@ def _request_params_to_args( errors = [] for field in required_params: - value = 
received_params.get(field.alias) - field_info = field.field_info + + # To ensure early failure, we check if it's not an instance of Param. if not isinstance(field_info, Param): raise AssertionError(f"Expected Param field_info, got {field_info}") + value = received_params.get(field.alias) + loc = (field_info.in_.value, field.alias) # If we don't have a value, see if it's required or has a default @@ -377,3 +392,30 @@ def _normalize_multi_query_string_with_param(query_string: Optional[Dict[str, st except KeyError: pass return query_string + + +def _normalize_multi_header_values_with_param(headers: Optional[Dict[str, str]], params: Sequence[ModelField]): + """ + Extract and normalize resolved_headers_field + + Parameters + ---------- + headers: Dict + A dictionary containing the initial header parameters. + params: Sequence[ModelField] + A sequence of ModelField objects representing parameters. + + Returns + ------- + A dictionary containing the processed headers. + """ + if headers: + for param in filter(is_scalar_field, params): + try: + if len(headers[param.alias]) == 1: + # if the target parameter is a scalar and the list contains only 1 element + # we keep the first value of the headers regardless if there are more in the payload + headers[param.alias] = headers[param.alias][0] + except KeyError: + pass + return headers diff --git a/aws_lambda_powertools/event_handler/openapi/dependant.py b/aws_lambda_powertools/event_handler/openapi/dependant.py index 418a86e083c..abcb91e90dd 100644 --- a/aws_lambda_powertools/event_handler/openapi/dependant.py +++ b/aws_lambda_powertools/event_handler/openapi/dependant.py @@ -14,12 +14,12 @@ from aws_lambda_powertools.event_handler.openapi.params import ( Body, Dependant, + Header, Param, ParamTypes, Query, _File, _Form, - _Header, analyze_param, create_response_field, get_flat_dependant, @@ -59,16 +59,21 @@ def add_param_to_fields( """ field_info = cast(Param, field.field_info) - if field_info.in_ == ParamTypes.path: - 
dependant.path_params.append(field) - elif field_info.in_ == ParamTypes.query: - dependant.query_params.append(field) - elif field_info.in_ == ParamTypes.header: - dependant.header_params.append(field) + + # Dictionary to map ParamTypes to their corresponding lists in dependant + param_type_map = { + ParamTypes.path: dependant.path_params, + ParamTypes.query: dependant.query_params, + ParamTypes.header: dependant.header_params, + ParamTypes.cookie: dependant.cookie_params, + } + + # Check if field_info.in_ is a valid key in param_type_map and append the field to the corresponding list + # or raise an exception if it's not a valid key. + if field_info.in_ in param_type_map: + param_type_map[field_info.in_].append(field) else: - if field_info.in_ != ParamTypes.cookie: - raise AssertionError(f"Unsupported param type: {field_info.in_}") - dependant.cookie_params.append(field) + raise AssertionError(f"Unsupported param type: {field_info.in_}") def get_typed_annotation(annotation: Any, globalns: Dict[str, Any]) -> Any: @@ -265,7 +270,7 @@ def is_body_param(*, param_field: ModelField, is_path_param: bool) -> bool: return False elif is_scalar_field(field=param_field): return False - elif isinstance(param_field.field_info, (Query, _Header)) and is_scalar_sequence_field(param_field): + elif isinstance(param_field.field_info, (Query, Header)) and is_scalar_sequence_field(param_field): return False else: if not isinstance(param_field.field_info, Body): diff --git a/aws_lambda_powertools/event_handler/openapi/params.py b/aws_lambda_powertools/event_handler/openapi/params.py index 78426cbc7c9..d5665a48d30 100644 --- a/aws_lambda_powertools/event_handler/openapi/params.py +++ b/aws_lambda_powertools/event_handler/openapi/params.py @@ -486,7 +486,7 @@ def __init__( ) -class _Header(Param): +class Header(Param): """ A class used internally to represent a header parameter in a path operation. 
""" @@ -527,12 +527,75 @@ def __init__( json_schema_extra: Union[Dict[str, Any], None] = None, **extra: Any, ): + """ + Constructs a new Query param. + + Parameters + ---------- + default: Any + The default value of the parameter + default_factory: Callable[[], Any], optional + Callable that will be called when a default value is needed for this field + annotation: Any, optional + The type annotation of the parameter + alias: str, optional + The public name of the field + alias_priority: int, optional + Priority of the alias. This affects whether an alias generator is used + validation_alias: str | AliasPath | AliasChoices | None, optional + Alias to be used for validation only + serialization_alias: str | AliasPath | AliasChoices | None, optional + Alias to be used for serialization only + convert_underscores: bool + If true convert "_" to "-" + See RFC: https://www.rfc-editor.org/rfc/rfc9110.html#name-field-name-registry + title: str, optional + The title of the parameter + description: str, optional + The description of the parameter + gt: float, optional + Only applies to numbers, required the field to be "greater than" + ge: float, optional + Only applies to numbers, required the field to be "greater than or equal" + lt: float, optional + Only applies to numbers, required the field to be "less than" + le: float, optional + Only applies to numbers, required the field to be "less than or equal" + min_length: int, optional + Only applies to strings, required the field to have a minimum length + max_length: int, optional + Only applies to strings, required the field to have a maximum length + pattern: str, optional + Only applies to strings, requires the field match against a regular expression pattern string + discriminator: str, optional + Parameter field name for discriminating the type in a tagged union + strict: bool, optional + Enables Pydantic's strict mode for the field + multiple_of: float, optional + Only applies to numbers, requires the field to be a 
multiple of the given value + allow_inf_nan: bool, optional + Only applies to numbers, requires the field to allow infinity and NaN values + max_digits: int, optional + Only applies to Decimals, requires the field to have a maxmium number of digits within the decimal. + decimal_places: int, optional + Only applies to Decimals, requires the field to have at most a number of decimal places + examples: List[Any], optional + A list of examples for the parameter + deprecated: bool, optional + If `True`, the parameter will be marked as deprecated + include_in_schema: bool, optional + If `False`, the parameter will be excluded from the generated OpenAPI schema + json_schema_extra: Dict[str, Any], optional + Extra values to include in the generated OpenAPI schema + """ self.convert_underscores = convert_underscores + self._alias = alias + super().__init__( default=default, default_factory=default_factory, annotation=annotation, - alias=alias, + alias=self._alias, alias_priority=alias_priority, validation_alias=validation_alias, serialization_alias=serialization_alias, @@ -558,6 +621,18 @@ def __init__( **extra, ) + @property + def alias(self): + return self._alias + + @alias.setter + def alias(self, value: Optional[str] = None): + if value is not None: + # Headers are case-insensitive according to RFC 7540 (HTTP/2), so we lower the parameter name + # This ensures that customers can access headers with any casing, as per the RFC guidelines. 
+ # Reference: https://www.rfc-editor.org/rfc/rfc7540#section-8.1.2 + self._alias = value.lower() + class Body(FieldInfo): """ diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 688c9567efa..98f37b4f415 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -42,6 +42,17 @@ def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: return self.query_string_parameters + @property + def resolved_headers_field(self) -> Optional[Dict[str, Any]]: + headers: Dict[str, Any] = {} + + if self.multi_value_headers: + headers = self.multi_value_headers + else: + headers = self.headers + + return {key.lower(): value for key, value in headers.items()} + @property def multi_value_headers(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueHeaders") diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index 9e013eac038..c37bd22ca53 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -125,6 +125,17 @@ def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: return self.query_string_parameters + @property + def resolved_headers_field(self) -> Optional[Dict[str, Any]]: + headers: Dict[str, Any] = {} + + if self.multi_value_headers: + headers = self.multi_value_headers + else: + headers = self.headers + + return {key.lower(): value for key, value in headers.items()} + @property def request_context(self) -> APIGatewayEventRequestContext: return APIGatewayEventRequestContext(self._data) @@ -316,3 +327,11 @@ def resolved_query_string_parameters(self) -> Optional[Dict[str, Any]]: return query_string return {} + + @property + def resolved_headers_field(self) -> 
Optional[Dict[str, Any]]: + if self.headers is not None: + headers = {key.lower(): value.split(",") if "," in value else value for key, value in self.headers.items()} + return headers + + return {} diff --git a/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py b/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py index d9b45242376..0fa97036a3e 100644 --- a/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py +++ b/aws_lambda_powertools/utilities/data_classes/bedrock_agent_event.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper @@ -112,3 +112,7 @@ def query_string_parameters(self) -> Optional[Dict[str, str]]: @property def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: return self.query_string_parameters + + @property + def resolved_headers_field(self) -> Optional[Dict[str, Any]]: + return {} diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index d2cf57d4af5..0560159ecc5 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -114,6 +114,21 @@ def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: """ return self.query_string_parameters + @property + def resolved_headers_field(self) -> Optional[Dict[str, Any]]: + """ + This property determines the appropriate header to be used + as a trusted source for validating OpenAPI. + + This is necessary because different resolvers use different formats to encode + headers parameters. + + Headers are case-insensitive according to RFC 7540 (HTTP/2), so we lower the header name + This ensures that customers can access headers with any casing, as per the RFC guidelines. 
+ Reference: https://www.rfc-editor.org/rfc/rfc7540#section-8.1.2 + """ + return self.headers + @property def is_base64_encoded(self) -> Optional[bool]: return self.get("isBase64Encoded") diff --git a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py index 633ce068f6e..f12c53d841a 100644 --- a/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py +++ b/aws_lambda_powertools/utilities/data_classes/vpc_lattice.py @@ -145,6 +145,14 @@ def query_string_parameters(self) -> Dict[str, str]: def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: return self.query_string_parameters + @property + def resolved_headers_field(self) -> Optional[Dict[str, Any]]: + if self.headers is not None: + headers = {key.lower(): value.split(",") if "," in value else value for key, value in self.headers.items()} + return headers + + return {} + class vpcLatticeEventV2Identity(DictWrapper): @property @@ -259,3 +267,10 @@ def query_string_parameters(self) -> Optional[Dict[str, str]]: @property def resolved_query_string_parameters(self) -> Optional[Dict[str, str]]: return self.query_string_parameters + + @property + def resolved_headers_field(self) -> Optional[Dict[str, str]]: + if self.headers is not None: + return {key.lower(): value for key, value in self.headers.items()} + + return {} diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 86b97c87e4b..32631ac867e 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -368,13 +368,13 @@ We use the `Annotated` and OpenAPI `Body` type to instruct Event Handler that ou !!! info "We will automatically validate and inject incoming query strings via type annotation." -We use the `Annotated` type to tell Event Handler that a particular parameter is not only an optional string, but also a query string with constraints. 
+We use the `Annotated` type to tell the Event Handler that a particular parameter is not only an optional string, but also a query string with constraints. In the following example, we use a new `Query` OpenAPI type to add [one out of many possible constraints](#customizing-openapi-parameters), which should read as: * `completed` is a query string with a `None` as its default value * `completed`, when set, should have at minimum 4 characters -* Doesn't match? Event Handler will return a validation error response +* No match? Event Handler will return a validation error response @@ -386,7 +386,7 @@ In the following example, we use a new `Query` OpenAPI type to add [one out of m 1. If you're not using Python 3.9 or higher, you can install and use [`typing_extensions`](https://pypi.org/project/typing-extensions/){target="_blank" rel="nofollow"} to the same effect 2. `Query` is a special OpenAPI type that can add constraints to a query string as well as document them - 3. **First time seeing the `Annotated`?**

This special type uses the first argument as the actual type, and subsequent arguments are metadata.

At runtime, static checkers will also see the first argument, but anyone receiving them could inspect them to fetch their metadata. + 3. **First time seeing `Annotated`?**

This special type uses the first argument as the actual type, and subsequent arguments as metadata.

At runtime, static checkers will also see the first argument, but any receiver can inspect it to get the metadata. === "skip_validating_query_strings.py" @@ -424,6 +424,40 @@ For example, we could validate that `` dynamic path should be no greate 1. `Path` is a special OpenAPI type that allows us to constrain todo_id to be less than 999. +#### Validating headers + +We use the `Annotated` type to tell the Event Handler that a particular parameter is a header that needs to be validated. + +!!! info "We adhere to [HTTP RFC standards](https://www.rfc-editor.org/rfc/rfc7540#section-8.1.2){target="_blank" rel="nofollow"}, which means we treat HTTP headers as case-insensitive." + +In the following example, we use a new `Header` OpenAPI type to add [one out of many possible constraints](#customizing-openapi-parameters), which should read as: + +* `correlation_id` is a header that must be present in the request +* `correlation_id` should have 16 characters +* No match? Event Handler will return a validation error response + + + +=== "validating_headers.py" + + ```python hl_lines="8 10 27" + --8<-- "examples/event_handler_rest/src/validating_headers.py" + ``` + + 1. If you're not using Python 3.9 or higher, you can install and use [`typing_extensions`](https://pypi.org/project/typing-extensions/){target="_blank" rel="nofollow"} to the same effect + 2. `Header` is a special OpenAPI type that can add constraints and documentation to a header + 3. **First time seeing `Annotated`?**

This special type uses the first argument as the actual type, and subsequent arguments as metadata.

At runtime, static checkers will also see the first argument, but any receiver can inspect it to get the metadata. + +=== "working_with_headers_multi_value.py" + + You can handle multi-value headers by declaring it as a list of the desired type. + + ```python hl_lines="23" + --8<-- "examples/event_handler_rest/src/working_with_headers_multi_value.py" + ``` + + 1. `cloudfront_viewer_country` is a list that must contain values from the `CountriesAllowed` enumeration. + ### Accessing request details Event Handler integrates with [Event Source Data Classes utilities](../../utilities/data_classes.md){target="_blank"}, and it exposes their respective resolver request details and convenient methods under `app.current_event`. diff --git a/examples/event_handler_rest/src/validating_headers.py b/examples/event_handler_rest/src/validating_headers.py new file mode 100644 index 00000000000..e830a49c38c --- /dev/null +++ b/examples/event_handler_rest/src/validating_headers.py @@ -0,0 +1,39 @@ +from typing import List, Optional + +import requests +from pydantic import BaseModel, Field + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.openapi.params import Header # (2)! +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.shared.types import Annotated # (1)! +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver(enable_validation=True) + + +class Todo(BaseModel): + userId: int + id_: Optional[int] = Field(alias="id", default=None) + title: str + completed: bool + + +@app.get("/todos") +@tracer.capture_method +def get_todos(correlation_id: Annotated[str, Header(min_length=16, max_length=16)]) -> List[Todo]: # (3)! 
+ url = "https://jsonplaceholder.typicode.com/todos" + + todo = requests.get(url, headers={"correlation_id": correlation_id}) + todo.raise_for_status() + + return todo.json() + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/working_with_headers_multi_value.py b/examples/event_handler_rest/src/working_with_headers_multi_value.py new file mode 100644 index 00000000000..956fd58b14d --- /dev/null +++ b/examples/event_handler_rest/src/working_with_headers_multi_value.py @@ -0,0 +1,34 @@ +from enum import Enum +from typing import List + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.openapi.params import Header +from aws_lambda_powertools.shared.types import Annotated +from aws_lambda_powertools.utilities.typing import LambdaContext + +app = APIGatewayRestResolver(enable_validation=True) + + +class CountriesAllowed(Enum): + """Example of an Enum class.""" + + US = "US" + PT = "PT" + BR = "BR" + + +@app.get("/hello") +def get( + cloudfront_viewer_country: Annotated[ + List[CountriesAllowed], # (1)! 
+ Header( + description="This is multi value header parameter.", + ), + ], +): + """Return validated multi-value header values.""" + return cloudfront_viewer_country + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/events/albMultiValueQueryStringEvent.json b/tests/events/albMultiValueQueryStringEvent.json index 4584ba7c477..d5cdf18f023 100644 --- a/tests/events/albMultiValueQueryStringEvent.json +++ b/tests/events/albMultiValueQueryStringEvent.json @@ -14,6 +14,13 @@ "accept": [ "*/*" ], + "header2": [ + "value1", + "value2" + ], + "header1": [ + "value1" + ], "host": [ "alb-c-LoadB-14POFKYCLBNSF-1815800096.eu-central-1.elb.amazonaws.com" ], diff --git a/tests/events/apiGatewayProxyEvent.json b/tests/events/apiGatewayProxyEvent.json index 3f095e28e45..da814c91100 100644 --- a/tests/events/apiGatewayProxyEvent.json +++ b/tests/events/apiGatewayProxyEvent.json @@ -78,4 +78,4 @@ "stageVariables": null, "body": "Hello from Lambda!", "isBase64Encoded": false -} \ No newline at end of file +} diff --git a/tests/events/lambdaFunctionUrlEventWithHeaders.json b/tests/events/lambdaFunctionUrlEventWithHeaders.json index e453690d9b3..d1cc50630a8 100644 --- a/tests/events/lambdaFunctionUrlEventWithHeaders.json +++ b/tests/events/lambdaFunctionUrlEventWithHeaders.json @@ -23,7 +23,9 @@ "cache-control":"max-age=0", "accept-encoding":"gzip, deflate, br", "sec-fetch-dest":"document", - "user-agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36" + "user-agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36", + "header1": "value1", + "header2": "value1,value2" }, "queryStringParameters": { "parameter1": "value1,value2", diff --git a/tests/events/vpcLatticeEvent.json b/tests/events/vpcLatticeEvent.json index 936bfb22d1b..fa9031f7dc4 100644 --- a/tests/events/vpcLatticeEvent.json +++ 
b/tests/events/vpcLatticeEvent.json @@ -5,7 +5,9 @@ "user_agent": "curl/7.64.1", "x-forwarded-for": "10.213.229.10", "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws", - "accept": "*/*" + "accept": "*/*", + "header1": "value1", + "header2": "value1,value2" }, "query_string_parameters": { "order-id": "1" diff --git a/tests/events/vpcLatticeV2EventWithHeaders.json b/tests/events/vpcLatticeV2EventWithHeaders.json index 11b36ef118b..fdaf7dc7891 100644 --- a/tests/events/vpcLatticeV2EventWithHeaders.json +++ b/tests/events/vpcLatticeV2EventWithHeaders.json @@ -2,12 +2,31 @@ "version": "2.0", "path": "/newpath", "method": "GET", - "headers": { - "user_agent": "curl/7.64.1", - "x-forwarded-for": "10.213.229.10", - "host": "test-lambda-service-3908sdf9u3u.dkfjd93.vpc-lattice-svcs.us-east-2.on.aws", - "accept": "*/*" - }, + "headers":{ + "user-agent":[ + "curl/8.3.0" + ], + "accept":[ + "*/*" + ], + "powertools":[ + "a", + "b" + ], + "x-forwarded-for":[ + "172.31.40.143" + ], + "host":[ + "lattice-svc-027b423199122da5f.7d67968.vpc-lattice-svcs.us-east-1.on.aws" + ], + "Header1": [ + "value1" + ], + "Header2": [ + "value1", + "value2" + ] + }, "queryStringParameters": { "parameter1": [ "value1", diff --git a/tests/functional/event_handler/test_openapi_params.py b/tests/functional/event_handler/test_openapi_params.py index 2f48f5aa534..38b0cbed307 100644 --- a/tests/functional/event_handler/test_openapi_params.py +++ b/tests/functional/event_handler/test_openapi_params.py @@ -13,11 +13,11 @@ ) from aws_lambda_powertools.event_handler.openapi.params import ( Body, + Header, Param, ParamTypes, Query, _create_model_field, - _Header, ) from aws_lambda_powertools.shared.types import Annotated @@ -431,7 +431,7 @@ def handler(): def test_create_header(): - header = _Header(convert_underscores=True) + header = Header(convert_underscores=True) assert header.convert_underscores is True @@ -456,7 +456,7 @@ def test_create_model_field_with_empty_in(): # 
Tests that when we try to create a model field with convert_underscore, we convert the field name def test_create_model_field_convert_underscore(): - field_info = _Header(alias=None, convert_underscores=True) + field_info = Header(alias=None, convert_underscores=True) result = _create_model_field(field_info, int, "user_id", False) assert result.alias == "user-id" diff --git a/tests/functional/event_handler/test_openapi_validation_middleware.py b/tests/functional/event_handler/test_openapi_validation_middleware.py index 23fa131ab9f..07e2a34ac42 100644 --- a/tests/functional/event_handler/test_openapi_validation_middleware.py +++ b/tests/functional/event_handler/test_openapi_validation_middleware.py @@ -4,6 +4,7 @@ from pathlib import PurePath from typing import List, Tuple +import pytest from pydantic import BaseModel from aws_lambda_powertools.event_handler import ( @@ -12,9 +13,10 @@ APIGatewayRestResolver, LambdaFunctionUrlResolver, Response, + VPCLatticeResolver, VPCLatticeV2Resolver, ) -from aws_lambda_powertools.event_handler.openapi.params import Body, Query +from aws_lambda_powertools.event_handler.openapi.params import Body, Header, Query from aws_lambda_powertools.shared.types import Annotated from tests.functional.utils import load_event @@ -23,6 +25,7 @@ LOAD_GW_EVENT_ALB = load_event("albMultiValueQueryStringEvent.json") LOAD_GW_EVENT_LAMBDA_URL = load_event("lambdaFunctionUrlEventWithHeaders.json") LOAD_GW_EVENT_VPC_LATTICE = load_event("vpcLatticeV2EventWithHeaders.json") +LOAD_GW_EVENT_VPC_LATTICE_V1 = load_event("vpcLatticeEvent.json") def test_validate_scalars(): @@ -417,267 +420,601 @@ def handler(user: Model) -> Response[Model]: assert "missing" in result["body"] -def test_validate_rest_api_resolver_with_multi_query_params(): - # GIVEN an APIGatewayRestResolver with validation enabled +########### TEST WITH QUERY PARAMS +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 
200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_without_query_params", 200, None), + ], +) +def test_validation_query_string_with_api_rest_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a APIGatewayRestResolver with validation enabled app = APIGatewayRestResolver(enable_validation=True) - # WHEN a handler is defined with a default scalar parameter and a list - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - print(parameter2) - LOAD_GW_EVENT["httpMethod"] = "GET" LOAD_GW_EVENT["path"] = "/users" + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT, {}) - assert result["statusCode"] == 200 + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) -def test_validate_rest_api_resolver_with_multi_query_params_fail(): - # GIVEN an APIGatewayRestResolver with validation enabled - app = APIGatewayRestResolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(parameter1: Annotated[List[int], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler2(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) - LOAD_GW_EVENT["httpMethod"] = "GET" - LOAD_GW_EVENT["path"] = "/users" + # Define handler3 without params + if handler_func == "handler3_without_query_params": + LOAD_GW_EVENT["queryStringParameters"] = None + LOAD_GW_EVENT["multiValueQueryStringParameters"] = None - # THEN the handler should be invoked and return 422 + @app.get("/users") + def handler3(): + return 
None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code result = app(LOAD_GW_EVENT, {}) - assert result["statusCode"] == 422 - assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + assert result["statusCode"] == expected_status_code + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) -def test_validate_rest_api_resolver_without_query_params(): - # GIVEN an APIGatewayRestResolver with validation enabled - app = APIGatewayRestResolver(enable_validation=True) - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(): - return None +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_without_query_params", 200, None), + ], +) +def test_validation_query_string_with_api_http_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a APIGatewayHttpResolver with validation enabled + app = APIGatewayHttpResolver(enable_validation=True) - LOAD_GW_EVENT["httpMethod"] = "GET" - LOAD_GW_EVENT["path"] = "/users" - LOAD_GW_EVENT["queryStringParameters"] = None - LOAD_GW_EVENT["multiValueQueryStringParameters"] = None + LOAD_GW_EVENT_HTTP["rawPath"] = "/users" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 422 - result = app(LOAD_GW_EVENT, {}) - assert result["statusCode"] == 200 + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + 
@app.get("/users") + def handler1(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) -def test_validate_http_resolver_with_multi_query_params(): - # GIVEN an APIGatewayHttpResolver with validation enabled - app = APIGatewayHttpResolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined with a default scalar parameter and a list - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler2(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) - LOAD_GW_EVENT_HTTP["rawPath"] = "/users" - LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" - LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + # Define handler3 without params + if handler_func == "handler3_without_query_params": + LOAD_GW_EVENT_HTTP["queryStringParameters"] = None - # THEN the handler should be invoked and return 200 + @app.get("/users") + def handler3(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code result = app(LOAD_GW_EVENT_HTTP, {}) - assert result["statusCode"] == 200 + assert result["statusCode"] == expected_status_code + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) -def test_validate_http_resolver_with_multi_query_values_fail(): - # GIVEN an APIGatewayHttpResolver with validation enabled - app = APIGatewayHttpResolver(enable_validation=True) - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(parameter1: Annotated[List[int], Query()], parameter2: str): - print(parameter2) +@pytest.mark.parametrize( + "handler_func, expected_status_code, 
expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_without_query_params", 200, None), + ], +) +def test_validation_query_string_with_alb_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a ALBResolver with validation enabled + app = ALBResolver(enable_validation=True) - LOAD_GW_EVENT_HTTP["rawPath"] = "/users" - LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" - LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" + LOAD_GW_EVENT_ALB["path"] = "/users" + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 422 - result = app(LOAD_GW_EVENT_HTTP, {}) - assert result["statusCode"] == 422 - assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) -def test_validate_http_resolver_without_query_params(): - # GIVEN an APIGatewayHttpResolver with validation enabled - app = APIGatewayHttpResolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": + + @app.get("/users") + def handler2(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + # Define handler3 without params + if handler_func == "handler3_without_query_params": + LOAD_GW_EVENT_HTTP["multiValueQueryStringParameters"] = None + + @app.get("/users") + def handler3(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_ALB, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in 
the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) + + +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_without_query_params", 200, None), + ], +) +def test_validation_query_string_with_lambda_url_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a LambdaFunctionUrlResolver with validation enabled + app = LambdaFunctionUrlResolver(enable_validation=True) + + LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" + LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + # WHEN a handler is defined with various parameters and routes + + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + + @app.get("/users") + def handler1(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": + + @app.get("/users") + def handler2(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + # Define handler3 without params + if handler_func == "handler3_without_query_params": + LOAD_GW_EVENT_LAMBDA_URL["queryStringParameters"] = None + + @app.get("/users") + def handler3(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) + + +@pytest.mark.parametrize( + "handler_func, expected_status_code, 
expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_without_query_params", 200, None), + ], +) +def test_validation_query_string_with_vpc_lattice_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a VPCLatticeV2Resolver with validation enabled + app = VPCLatticeV2Resolver(enable_validation=True) + + LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + + # WHEN a handler is defined with various parameters and routes + + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + + @app.get("/users") + def handler1(parameter1: Annotated[List[str], Query()], parameter2: str): + print(parameter2) + + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": + + @app.get("/users") + def handler2(parameter1: Annotated[List[int], Query()], parameter2: str): + print(parameter2) + + # Define handler3 without params + if handler_func == "handler3_without_query_params": + LOAD_GW_EVENT_VPC_LATTICE["queryStringParameters"] = None + + @app.get("/users") + def handler3(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) + + +########### TEST WITH HEADER PARAMS +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def 
test_validation_header_with_api_rest_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a APIGatewayRestResolver with validation enabled + app = APIGatewayRestResolver(enable_validation=True) - # WHEN a handler is defined without any query params - @app.get("/users") - def handler(): - return None + LOAD_GW_EVENT["httpMethod"] = "GET" + LOAD_GW_EVENT["path"] = "/users" + # WHEN a handler is defined with various parameters and routes + + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) + + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": + + @app.get("/users") + def handler2(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) + + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": + + @app.get("/users") + def handler3( + header2: Annotated[List[str], Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + print(header2) + + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT["headers"] = None + LOAD_GW_EVENT["multiValueHeaders"] = None + + @app.get("/users") + def handler4(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) + + +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, 
"['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def test_validation_header_with_http_rest_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a APIGatewayHttpResolver with validation enabled + app = APIGatewayHttpResolver(enable_validation=True) LOAD_GW_EVENT_HTTP["rawPath"] = "/users" LOAD_GW_EVENT_HTTP["requestContext"]["http"]["method"] = "GET" LOAD_GW_EVENT_HTTP["requestContext"]["http"]["path"] = "/users" - LOAD_GW_EVENT_HTTP["queryStringParameters"] = None + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_HTTP, {}) - assert result["statusCode"] == 200 + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) -def test_validate_alb_resolver_with_multi_query_values(): - # GIVEN an ALBResolver with validation enabled - app = ALBResolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined with a default scalar parameter and a list - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler2(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) - LOAD_GW_EVENT_ALB["path"] = "/users" + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_ALB, {}) - assert result["statusCode"] == 200 + @app.get("/users") + def handler3( + header2: Annotated[List[str], Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + 
print(header2) + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT_HTTP["headers"] = None -def test_validate_alb_resolver_with_multi_query_values_fail(): - # GIVEN an ALBResolver with validation enabled - app = ALBResolver(enable_validation=True) - - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(parameter1: Annotated[List[int], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler4(): + return None - LOAD_GW_EVENT_ALB["path"] = "/users" + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_HTTP, {}) + assert result["statusCode"] == expected_status_code - # THEN the handler should be invoked and return 422 - result = app(LOAD_GW_EVENT_ALB, {}) - assert result["statusCode"] == 422 - assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) -def test_validate_alb_resolver_without_query_params(): - # GIVEN an ALBResolver with validation enabled +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def test_validation_header_with_alb_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a ALBResolver with validation enabled app = ALBResolver(enable_validation=True) - # WHEN a handler is defined without any query params - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - 
print(parameter2) - LOAD_GW_EVENT_ALB["path"] = "/users" - LOAD_GW_EVENT_HTTP["multiValueQueryStringParameters"] = None + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_ALB, {}) - assert result["statusCode"] == 200 + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) -def test_validate_lambda_url_resolver_with_multi_query_params(): - # GIVEN an LambdaFunctionUrlResolver with validation enabled - app = LambdaFunctionUrlResolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined with a default scalar parameter and a list - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler2(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) - LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" - LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" - LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) - assert result["statusCode"] == 200 + @app.get("/users") + def handler3( + header2: Annotated[List[str], Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + print(header2) + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT_ALB["multiValueHeaders"] = None -def test_validate_lambda_url_resolver_with_multi_query_params_fail(): - # GIVEN an LambdaFunctionUrlResolver with validation enabled 
- app = LambdaFunctionUrlResolver(enable_validation=True) + @app.get("/users") + def handler4(): + return None - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(parameter1: Annotated[List[int], Query()], parameter2: str): - print(parameter2) + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_ALB, {}) + assert result["statusCode"] == expected_status_code - LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" - LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" - LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) - # THEN the handler should be invoked and return 422 - result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) - assert result["statusCode"] == 422 - assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) - -def test_validate_lambda_url_resolver_without_query_params(): - # GIVEN an LambdaFunctionUrlResolver with validation enabled +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def test_validation_header_with_lambda_url_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a LambdaFunctionUrlResolver with validation enabled app = LambdaFunctionUrlResolver(enable_validation=True) - # WHEN a handler is defined without any query params - @app.get("/users") - def handler(): - return None - LOAD_GW_EVENT_LAMBDA_URL["rawPath"] = "/users" 
LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["method"] = "GET" LOAD_GW_EVENT_LAMBDA_URL["requestContext"]["http"]["path"] = "/users" - LOAD_GW_EVENT_LAMBDA_URL["queryStringParameters"] = None + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) - assert result["statusCode"] == 200 + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) -def test_validate_vpc_lattice_resolver_with_multi_params_values(): - # GIVEN an VPCLatticeV2Resolver with validation enabled - app = VPCLatticeV2Resolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined with a default scalar parameter and a list - @app.get("/users") - def handler(parameter1: Annotated[List[str], Query()], parameter2: str): - print(parameter2) + @app.get("/users") + def handler2(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) - LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": - # THEN the handler should be invoked and return 200 - result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) - assert result["statusCode"] == 200 + @app.get("/users") + def handler3( + header2: Annotated[List[str], Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + print(header2) + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT_LAMBDA_URL["headers"] = None -def test_validate_vpc_lattice_resolver_with_multi_query_params_fail(): - # GIVEN an VPCLatticeV2Resolver with validation enabled - app = VPCLatticeV2Resolver(enable_validation=True) + @app.get("/users") 
+ def handler4(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_LAMBDA_URL, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) - # WHEN a handler is defined with a default scalar parameter and a list with wrong type - @app.get("/users") - def handler(parameter1: Annotated[List[int], Query()], parameter2: str): - print(parameter2) + +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def test_validation_header_with_vpc_lattice_v1_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a VPCLatticeResolver with validation enabled + app = VPCLatticeResolver(enable_validation=True) + + LOAD_GW_EVENT_VPC_LATTICE_V1["raw_path"] = "/users" + LOAD_GW_EVENT_VPC_LATTICE_V1["method"] = "GET" + # WHEN a handler is defined with various parameters and routes + + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) + + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": + + @app.get("/users") + def handler2(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) + + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": + + @app.get("/users") + def handler3( + header2: Annotated[List[str], 
Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + print(header2) + + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT_VPC_LATTICE_V1["headers"] = None + + @app.get("/users") + def handler4(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code + result = app(LOAD_GW_EVENT_VPC_LATTICE_V1, {}) + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) + + +@pytest.mark.parametrize( + "handler_func, expected_status_code, expected_error_text", + [ + ("handler1_with_correct_params", 200, None), + ("handler2_with_wrong_params", 422, "['type_error.integer', 'int_parsing']"), + ("handler3_with_uppercase_params", 200, None), + ("handler4_without_header_params", 200, None), + ], +) +def test_validation_header_with_vpc_lattice_v2_resolver(handler_func, expected_status_code, expected_error_text): + # GIVEN a VPCLatticeV2Resolver with validation enabled + app = VPCLatticeV2Resolver(enable_validation=True) LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" + LOAD_GW_EVENT_VPC_LATTICE["method"] = "GET" + # WHEN a handler is defined with various parameters and routes - # THEN the handler should be invoked and return 422 - result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) - assert result["statusCode"] == 422 - assert any(text in result["body"] for text in ["type_error.integer", "int_parsing"]) + # Define handler1 with correct params + if handler_func == "handler1_with_correct_params": + @app.get("/users") + def handler1(header2: Annotated[List[str], Header()], header1: Annotated[str, Header()]): + print(header2) -def test_validate_vpc_lattice_resolver_without_query_params(): - # GIVEN an VPCLatticeV2Resolver with validation enabled - app = 
VPCLatticeV2Resolver(enable_validation=True) + # Define handler2 with wrong params + if handler_func == "handler2_with_wrong_params": - # WHEN a handler is defined without any query params - @app.get("/users") - def handler(): - return None + @app.get("/users") + def handler1(header2: Annotated[List[int], Header()], header1: Annotated[str, Header()]): + print(header2) - LOAD_GW_EVENT_VPC_LATTICE["path"] = "/users" - LOAD_GW_EVENT_VPC_LATTICE["queryStringParameters"] = None + # Define handler3 with uppercase parameters + if handler_func == "handler3_with_uppercase_params": - # THEN the handler should be invoked and return 200 + @app.get("/users") + def handler3( + header2: Annotated[List[str], Header(name="Header2")], + header1: Annotated[str, Header(name="Header1")], + ): + print(header2) + + # Define handler4 without params + if handler_func == "handler4_without_header_params": + LOAD_GW_EVENT_VPC_LATTICE["headers"] = None + + @app.get("/users") + def handler3(): + return None + + # THEN the handler should be invoked with the expected result + # AND the status code should match the expected_status_code result = app(LOAD_GW_EVENT_VPC_LATTICE, {}) - assert result["statusCode"] == 200 + assert result["statusCode"] == expected_status_code + + # IF expected_error_text is provided, THEN check for its presence in the response body + if expected_error_text: + assert any(text in result["body"] for text in expected_error_text) From a15a3587d93391c41e8872f350a3d9237c4fd750 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 22:49:32 +0000 Subject: [PATCH 29/32] chore(deps-dev): bump aws-cdk from 2.124.0 to 2.125.0 (#3693) Bumps [aws-cdk](https://github.com/aws/aws-cdk/tree/HEAD/packages/aws-cdk) from 2.124.0 to 2.125.0. 
- [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/commits/v2.125.0/packages/aws-cdk) --- updated-dependencies: - dependency-name: aws-cdk dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index cdfbdeceff1..514a6b2f6de 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "package-lock.json": "^1.0.0" }, "devDependencies": { - "aws-cdk": "^2.124.0" + "aws-cdk": "^2.125.0" } }, "node_modules/aws-cdk": { - "version": "2.124.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.124.0.tgz", - "integrity": "sha512-kUOfqwIAaTEx4ZozojZEhWa8G+O9KU+P0tERtDVmTw9ip4QXNMwTTkjj/IPtoH8qfXGdeibTQ9MJwRvHOR8kXQ==", + "version": "2.125.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.125.0.tgz", + "integrity": "sha512-6qFtaDPzhddhwIbCpqBjMePzZS7bfthGFQYfcwF1OhqMv2f3VpHQQ0f7kz4UxXJXUIR5BbgCnlpawH3c0aNzKw==", "dev": true, "bin": { "cdk": "bin/cdk" diff --git a/package.json b/package.json index bf23efc23e3..78063c6eb64 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.124.0" + "aws-cdk": "^2.125.0" }, "dependencies": { "package-lock.json": "^1.0.0" From 90df6b168bed04102b567a43cfe2f0867395b85b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 07:30:18 +0000 Subject: [PATCH 30/32] chore(ci): changelog rebuild (#3696) --- CHANGELOG.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index b5024e61b44..59162d1c4bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,26 +10,29 @@ ## Documentation +* **data-masking:** add docs for data masking utility ([#3186](https://github.com/aws-powertools/powertools-lambda-python/issues/3186)) * **metrics:** fix empty metric warning filter ([#3660](https://github.com/aws-powertools/powertools-lambda-python/issues/3660)) * **proccess:** add versioning and maintenance policy ([#3682](https://github.com/aws-powertools/powertools-lambda-python/issues/3682)) ## Features +* **event_handler:** support Header parameter validation in OpenAPI schema ([#3687](https://github.com/aws-powertools/powertools-lambda-python/issues/3687)) * **event_handler:** add support for multiValueQueryStringParameters in OpenAPI schema ([#3667](https://github.com/aws-powertools/powertools-lambda-python/issues/3667)) ## Maintenance -* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) -* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) * **deps:** bump squidfunk/mkdocs-material from `9aad7af` to `a4a2029` in /docs ([#3679](https://github.com/aws-powertools/powertools-lambda-python/issues/3679)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) * **deps:** bump codecov/codecov-action from 3.1.5 to 3.1.6 ([#3683](https://github.com/aws-powertools/powertools-lambda-python/issues/3683)) +* **deps:** bump squidfunk/mkdocs-material from `58eef6c` to `9aad7af` in /docs ([#3670](https://github.com/aws-powertools/powertools-lambda-python/issues/3670)) * **deps:** bump pydantic from 1.10.13 to 1.10.14 ([#3655](https://github.com/aws-powertools/powertools-lambda-python/issues/3655)) +* **deps:** bump the layer-balancer 
group in /layer/scripts/layer-balancer with 1 update ([#3665](https://github.com/aws-powertools/powertools-lambda-python/issues/3665)) +* **deps:** bump codecov/codecov-action from 3.1.4 to 3.1.5 ([#3674](https://github.com/aws-powertools/powertools-lambda-python/issues/3674)) +* **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) * **deps-dev:** bump aws-cdk from 2.123.0 to 2.124.0 ([#3678](https://github.com/aws-powertools/powertools-lambda-python/issues/3678)) -* **deps-dev:** bump sentry-sdk from 1.39.2 to 1.40.0 ([#3684](https://github.com/aws-powertools/powertools-lambda-python/issues/3684)) * **deps-dev:** bump ruff from 0.1.13 to 0.1.14 ([#3656](https://github.com/aws-powertools/powertools-lambda-python/issues/3656)) +* **deps-dev:** bump sentry-sdk from 1.39.2 to 1.40.0 ([#3684](https://github.com/aws-powertools/powertools-lambda-python/issues/3684)) * **deps-dev:** bump ruff from 0.1.14 to 0.1.15 ([#3685](https://github.com/aws-powertools/powertools-lambda-python/issues/3685)) -* **deps-dev:** bump aws-cdk from 2.122.0 to 2.123.0 ([#3673](https://github.com/aws-powertools/powertools-lambda-python/issues/3673)) +* **deps-dev:** bump aws-cdk from 2.124.0 to 2.125.0 ([#3693](https://github.com/aws-powertools/powertools-lambda-python/issues/3693)) From 94c588e7db8a65354dcd7a8be821ae41f3068adc Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 2 Feb 2024 09:23:07 +0000 Subject: [PATCH 31/32] fix(data-masking): fix and improve e2e tests for DataMasking (#3695) Fixing and improving e2e tests for DataMasking Co-authored-by: Heitor Lessa --- .../e2e/data_masking/test_e2e_data_masking.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/tests/e2e/data_masking/test_e2e_data_masking.py b/tests/e2e/data_masking/test_e2e_data_masking.py index a720a265d83..3ee2400b5cc 100644 --- a/tests/e2e/data_masking/test_e2e_data_masking.py +++ 
b/tests/e2e/data_masking/test_e2e_data_masking.py @@ -2,16 +2,23 @@ from uuid import uuid4 import pytest -from aws_encryption_sdk.exceptions import DecryptKeyError from aws_lambda_powertools.utilities.data_masking import DataMasking -from aws_lambda_powertools.utilities.data_masking.exceptions import DataMaskingContextMismatchError +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingContextMismatchError, + DataMaskingDecryptKeyError, +) from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( AWSEncryptionSDKProvider, ) from tests.e2e.utils import data_fetcher +@pytest.fixture +def security_context(): + return {"this": "is_secure"} + + @pytest.fixture def basic_handler_fn(infrastructure: dict) -> str: return infrastructure.get("BasicHandler", "") @@ -53,36 +60,35 @@ def test_encryption(data_masker): @pytest.mark.xdist_group(name="data_masking") -def test_encryption_context(data_masker): +def test_encryption_context(data_masker, security_context): # GIVEN an instantiation of DataMasking with the AWS encryption provider value = [1, 2, "string", 4.5] - context = {"this": "is_secure"} # WHEN encrypting and then decrypting the encrypted data with an encryption_context - encrypted_data = data_masker.encrypt(value, encryption_context=context) - decrypted_data = data_masker.decrypt(encrypted_data, encryption_context=context) + encrypted_data = data_masker.encrypt(value, **security_context) + decrypted_data = data_masker.decrypt(encrypted_data, **security_context) # THEN the result is the original input data assert decrypted_data == value @pytest.mark.xdist_group(name="data_masking") -def test_encryption_context_mismatch(data_masker): +def test_encryption_context_mismatch(data_masker, security_context): # GIVEN an instantiation of DataMasking with the AWS encryption provider value = [1, 2, "string", 4.5] # WHEN encrypting with a encryption_context - encrypted_data = data_masker.encrypt(value, 
encryption_context={"this": "is_secure"}) + encrypted_data = data_masker.encrypt(value, **security_context) # THEN decrypting with a different encryption_context should raise a ContextMismatchError with pytest.raises(DataMaskingContextMismatchError): - data_masker.decrypt(encrypted_data, encryption_context={"not": "same_context"}) + data_masker.decrypt(encrypted_data, this="different_context") @pytest.mark.xdist_group(name="data_masking") -def test_encryption_no_context_fail(data_masker): +def test_encryption_no_context_fail(data_masker, security_context): # GIVEN an instantiation of DataMasking with the AWS encryption provider value = [1, 2, "string", 4.5] @@ -92,7 +98,7 @@ def test_encryption_no_context_fail(data_masker): # THEN decrypting with an encryption_context should raise a ContextMismatchError with pytest.raises(DataMaskingContextMismatchError): - data_masker.decrypt(encrypted_data, encryption_context={"this": "is_secure"}) + data_masker.decrypt(encrypted_data, **security_context) @pytest.mark.xdist_group(name="data_masking") @@ -106,7 +112,7 @@ def test_encryption_decryption_key_mismatch(data_masker, kms_key2_arn): # THEN when decrypting with a different key it should fail data_masker_key2 = DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key2_arn])) - with pytest.raises(DecryptKeyError): + with pytest.raises(DataMaskingDecryptKeyError): data_masker_key2.decrypt(encrypted_data) From 8f4a46af23d3670166d5c2d0b1117135addae653 Mon Sep 17 00:00:00 2001 From: "Powertools for AWS Lambda (Python) bot" Date: Fri, 2 Feb 2024 09:55:39 +0000 Subject: [PATCH 32/32] chore: version bump --- aws_lambda_powertools/shared/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py index adcf3dc5272..7cac1198767 100644 --- a/aws_lambda_powertools/shared/version.py +++ b/aws_lambda_powertools/shared/version.py @@ -1,3 +1,3 @@ """Exposes version 
constant to avoid circular dependencies.""" -VERSION = "2.32.0" +VERSION = "2.33.0" diff --git a/pyproject.toml b/pyproject.toml index cb1f322e9ba..98917d05f5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.32.0" +version = "2.33.0" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]