diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 326b701e32a..c3b00a4b34a 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -28,7 +28,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index 4537173ac39..045edb432ba 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -17,6 +17,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: 'Dependency Review'
- uses: actions/dependency-review-action@9129d7d40b8c12c1ed0f60400d00c92d437adcce # v4.1.3
+ uses: actions/dependency-review-action@5bbc3ba658137598168acb2ab73b21c432dd411b # v4.2.5
diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml
index 2a03f56e4c4..d1348eca351 100644
--- a/.github/workflows/label_pr_on_title.yml
+++ b/.github/workflows/label_pr_on_title.yml
@@ -50,7 +50,7 @@ jobs:
pull-requests: write # label respective PR
steps:
- name: Checkout repository
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Label PR based on title"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
env:
diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml
index 2806cab2962..46b84971c41 100644
--- a/.github/workflows/on_label_added.yml
+++ b/.github/workflows/on_label_added.yml
@@ -47,7 +47,7 @@ jobs:
permissions:
pull-requests: write # comment on PR
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
# Maintenance: Persist state per PR as an artifact to avoid spam on label add
- name: "Suggest split large Pull Request"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml
index 80f6460bfa9..b0154f45e1f 100644
--- a/.github/workflows/on_merged_pr.yml
+++ b/.github/workflows/on_merged_pr.yml
@@ -49,7 +49,7 @@ jobs:
issues: write # label issue with pending-release
if: needs.get_pr_details.outputs.prIsMerged == 'true'
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Label PR related issue for release"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
env:
diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml
index 7c4fda8ff04..60a1782e186 100644
--- a/.github/workflows/on_opened_pr.yml
+++ b/.github/workflows/on_opened_pr.yml
@@ -47,7 +47,7 @@ jobs:
needs: get_pr_details
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Ensure related issue is present"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
env:
@@ -66,7 +66,7 @@ jobs:
permissions:
pull-requests: write # label and comment on PR if missing acknowledge section (requirement)
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Ensure acknowledgement section is present"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
env:
diff --git a/.github/workflows/ossf_scorecard.yml b/.github/workflows/ossf_scorecard.yml
index d9e065bf1e8..4544493a3e1 100644
--- a/.github/workflows/ossf_scorecard.yml
+++ b/.github/workflows/ossf_scorecard.yml
@@ -22,7 +22,7 @@ jobs:
steps:
- name: "Checkout code"
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
persist-credentials: false
diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml
index 8fb31e6c4c7..81b1c0cc476 100644
--- a/.github/workflows/publish_v2_layer.yml
+++ b/.github/workflows/publish_v2_layer.yml
@@ -88,7 +88,7 @@ jobs:
working-directory: ./layer
steps:
- name: checkout
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -105,7 +105,7 @@ jobs:
with:
node-version: "16.12"
- name: Setup python
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: "3.12"
cache: "pip"
@@ -124,7 +124,7 @@ jobs:
- name: Set up Docker Buildx
id: builder
- uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c # v3.1.0
+ uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20 # v3.2.0
with:
install: true
driver: docker
@@ -247,7 +247,7 @@ jobs:
pages: none
steps:
- name: Checkout repository # reusable workflows start clean, so we need to checkout again
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml
index 2a203d5989f..6dca729946d 100644
--- a/.github/workflows/quality_check.yml
+++ b/.github/workflows/quality_check.yml
@@ -50,11 +50,11 @@ jobs:
permissions:
contents: read # checkout code only
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: Install poetry
run: pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
@@ -71,7 +71,7 @@ jobs:
- name: Complexity baseline
run: make complexity-baseline
- name: Upload coverage to Codecov
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # 4.1.0
+ uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8 # 4.1.1
with:
file: ./coverage.xml
env_vars: PYTHON
diff --git a/.github/workflows/quality_check_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml
index 2d84f1154ba..c4676ed8777 100644
--- a/.github/workflows/quality_check_pydanticv2.yml
+++ b/.github/workflows/quality_check_pydanticv2.yml
@@ -49,11 +49,11 @@ jobs:
permissions:
contents: read # checkout code only
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: Install poetry
run: pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml
index ddfd7c249a3..8f8e178fdef 100644
--- a/.github/workflows/record_pr.yml
+++ b/.github/workflows/record_pr.yml
@@ -46,7 +46,7 @@ jobs:
permissions:
contents: read # NOTE: treat as untrusted location
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Extract PR details"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index eafb6752f68..d083e142ed5 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -80,7 +80,7 @@ jobs:
RELEASE_VERSION="${RELEASE_TAG_VERSION:1}"
echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_OUTPUT"
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -115,7 +115,7 @@ jobs:
contents: read
steps:
# NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -131,7 +131,7 @@ jobs:
- name: Install poetry
run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
- name: Set up Python
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: "3.12"
cache: "poetry"
@@ -156,7 +156,7 @@ jobs:
attestation_hashes: ${{ steps.encoded_hash.outputs.attestation_hashes }}
steps:
# NOTE: we need actions/checkout to configure git first (pre-commit hooks in make dev)
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -169,7 +169,7 @@ jobs:
- name: Install poetry
run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0
- name: Set up Python
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: "3.12"
cache: "poetry"
@@ -206,7 +206,7 @@ jobs:
# NOTE: provenance fails if we use action pinning... it's a Github limitation
# because SLSA needs to trace & attest it came from a given branch; pinning doesn't expose that information
# https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/generic/README.md#referencing-the-slsa-generator
- uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0
+ uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0
with:
base64-subjects: ${{ needs.build.outputs.attestation_hashes }}
upload-assets: false # we upload its attestation in create_tag job, otherwise it creates a new release
@@ -225,7 +225,7 @@ jobs:
RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}
steps:
# NOTE: we need actions/checkout in order to use our local actions (e.g., ./.github/actions)
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -237,12 +237,12 @@ jobs:
- name: Upload to PyPi prod
if: ${{ !inputs.skip_pypi }}
- uses: pypa/gh-action-pypi-publish@e53eb8b103ffcb59469888563dc324e3c8ba6f06 # v1.8.12
+ uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14
# PyPi test maintenance affected us numerous times, leaving for history purposes
# - name: Upload to PyPi test
# if: ${{ !inputs.skip_pypi }}
- # uses: pypa/gh-action-pypi-publish@e53eb8b103ffcb59469888563dc324e3c8ba6f06 # v1.8.12
+ # uses: pypa/gh-action-pypi-publish@81e9d935c883d0b210363ab89cf05f3894778450 # v1.8.14
# with:
# repository-url: https://test.pypi.org/legacy/
@@ -259,7 +259,7 @@ jobs:
contents: write
steps:
# NOTE: we need actions/checkout to authenticate and configure git first
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -303,7 +303,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# NOTE: we need actions/checkout to authenticate and configure git first
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -357,7 +357,7 @@ jobs:
env:
RELEASE_VERSION: ${{ needs.seal.outputs.RELEASE_VERSION }}
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml
index 8e69453fd6b..f2839d9bc75 100644
--- a/.github/workflows/reusable_deploy_v2_layer_stack.yml
+++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml
@@ -140,7 +140,7 @@ jobs:
has_arm64_support: "true"
steps:
- name: checkout
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
@@ -162,7 +162,7 @@ jobs:
with:
node-version: "16.12"
- name: Setup python
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: "3.12"
cache: "pip"
diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml
index 3fddb088de7..cdca4ff8422 100644
--- a/.github/workflows/reusable_deploy_v2_sar.yml
+++ b/.github/workflows/reusable_deploy_v2_sar.yml
@@ -79,7 +79,7 @@ jobs:
architecture: ["x86_64", "arm64"]
steps:
- name: checkout
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ env.RELEASE_COMMIT }}
diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml
index 1e40867db15..be7eeca27a2 100644
--- a/.github/workflows/reusable_export_pr_details.yml
+++ b/.github/workflows/reusable_export_pr_details.yml
@@ -76,7 +76,7 @@ jobs:
prLabels: ${{ steps.prLabels.outputs.prLabels }}
steps:
- name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: "Download previously saved PR"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
env:
diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml
index b7936f2c349..39ffe543096 100644
--- a/.github/workflows/reusable_publish_changelog.yml
+++ b/.github/workflows/reusable_publish_changelog.yml
@@ -26,7 +26,7 @@ jobs:
pull-requests: write # create PR
steps:
- name: Checkout repository # reusable workflows start clean, so we need to checkout again
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
fetch-depth: 0
- name: "Generate latest changelog"
diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml
index bf48e938fe4..798e6a68670 100644
--- a/.github/workflows/reusable_publish_docs.yml
+++ b/.github/workflows/reusable_publish_docs.yml
@@ -44,14 +44,14 @@ jobs:
id-token: write # trade JWT token for AWS credentials in AWS Docs account
pages: write # uncomment if mike fails as we migrated to S3 hosting
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
fetch-depth: 0
ref: ${{ inputs.git_ref }}
- name: Install poetry
run: pipx install poetry
- name: Set up Python
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: "3.12"
cache: "poetry"
diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml
index bbfeb28c349..25e667993e0 100644
--- a/.github/workflows/run-e2e-tests.yml
+++ b/.github/workflows/run-e2e-tests.yml
@@ -51,11 +51,11 @@ jobs:
if: ${{ github.actor != 'dependabot[bot]' && github.repository == 'aws-powertools/powertools-lambda-python' }}
steps:
- name: "Checkout"
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: Install poetry
run: pipx install poetry
- name: "Use Python"
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
+ uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: ${{ matrix.version }}
architecture: "x64"
diff --git a/.github/workflows/secure_workflows.yml b/.github/workflows/secure_workflows.yml
index fb7601997c6..b1db349d5e8 100644
--- a/.github/workflows/secure_workflows.yml
+++ b/.github/workflows/secure_workflows.yml
@@ -30,7 +30,7 @@ jobs:
contents: read # checkout code and subsequently GitHub action workflows
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: Ensure 3rd party workflows have SHA pinned
uses: zgosalvez/github-actions-ensure-sha-pinned-actions@ba37328d4ea95eaf8b3bd6c6cef308f709a5f2ec # v3.0.3
with:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 449fb38033d..06d77913f14 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,13 +6,122 @@
## Bug Fixes
+* **event_handler:** always add 422 response to the schema ([#3995](https://github.com/aws-powertools/powertools-lambda-python/issues/3995))
+* **event_handler:** make decoded_body field optional in ApiGateway resolver ([#3937](https://github.com/aws-powertools/powertools-lambda-python/issues/3937))
+* **tracer:** add name sanitization for X-Ray subsegments ([#4005](https://github.com/aws-powertools/powertools-lambda-python/issues/4005))
+
+## Code Refactoring
+
+* **logger:** add type annotation for append_keys method ([#3988](https://github.com/aws-powertools/powertools-lambda-python/issues/3988))
+* **parameters:** improve typing for get_secret method ([#3910](https://github.com/aws-powertools/powertools-lambda-python/issues/3910))
+
+## Documentation
+
+* **bedrock-agents:** fix type in Bedrock operation example ([#3948](https://github.com/aws-powertools/powertools-lambda-python/issues/3948))
+* **tutorial:** fix "Simplifying with Tracer" section in the tutorial ([#3962](https://github.com/aws-powertools/powertools-lambda-python/issues/3962))
+
+## Features
+
+* **batch:** add flag in SqsFifoProcessor to enable continuous message processing ([#3954](https://github.com/aws-powertools/powertools-lambda-python/issues/3954))
+* **data_classes:** Add CloudWatchAlarmEvent data class ([#3868](https://github.com/aws-powertools/powertools-lambda-python/issues/3868))
+* **event-handler:** add compress option when serving Swagger HTML ([#3946](https://github.com/aws-powertools/powertools-lambda-python/issues/3946))
+* **event_handler:** define exception_handler directly from the router ([#3979](https://github.com/aws-powertools/powertools-lambda-python/issues/3979))
+* **parameters:** add feature for creating and updating Parameters and Secrets ([#2858](https://github.com/aws-powertools/powertools-lambda-python/issues/2858))
+* **tracer:** auto-disable tracer when running in AWS SAM and Chalice environments ([#3949](https://github.com/aws-powertools/powertools-lambda-python/issues/3949))
+
+## Maintenance
+
+* **deps:** bump actions/checkout from 4.1.1 to 4.1.2 ([#3939](https://github.com/aws-powertools/powertools-lambda-python/issues/3939))
+* **deps:** bump datadog-lambda from 5.89.0 to 5.90.0 ([#3941](https://github.com/aws-powertools/powertools-lambda-python/issues/3941))
+* **deps:** bump redis from 5.0.2 to 5.0.3 ([#3929](https://github.com/aws-powertools/powertools-lambda-python/issues/3929))
+* **deps:** bump codecov/codecov-action from 4.1.0 to 4.1.1 ([#4021](https://github.com/aws-powertools/powertools-lambda-python/issues/4021))
+* **deps:** bump slsa-framework/slsa-github-generator from 1.9.0 to 1.10.0 ([#3997](https://github.com/aws-powertools/powertools-lambda-python/issues/3997))
+* **deps:** bump datadog-lambda from 5.90.0 to 5.91.0 ([#3958](https://github.com/aws-powertools/powertools-lambda-python/issues/3958))
+* **deps:** bump actions/dependency-review-action from 4.2.3 to 4.2.4 ([#4012](https://github.com/aws-powertools/powertools-lambda-python/issues/4012))
+* **deps:** bump pypa/gh-action-pypi-publish from 1.8.12 to 1.8.14 ([#3918](https://github.com/aws-powertools/powertools-lambda-python/issues/3918))
+* **deps:** bump actions/dependency-review-action from 4.1.3 to 4.2.3 ([#3993](https://github.com/aws-powertools/powertools-lambda-python/issues/3993))
+* **deps:** bump squidfunk/mkdocs-material from `6c81a89` to `3307665` in /docs ([#4017](https://github.com/aws-powertools/powertools-lambda-python/issues/4017))
+* **deps:** bump actions/dependency-review-action from 4.2.4 to 4.2.5 ([#4023](https://github.com/aws-powertools/powertools-lambda-python/issues/4023))
+* **deps:** bump docker/setup-buildx-action from 3.1.0 to 3.2.0 ([#3955](https://github.com/aws-powertools/powertools-lambda-python/issues/3955))
+* **deps:** bump actions/setup-python from 5.0.0 to 5.1.0 ([#4022](https://github.com/aws-powertools/powertools-lambda-python/issues/4022))
+* **deps:** bump aws-encryption-sdk from 3.1.1 to 3.2.0 ([#3983](https://github.com/aws-powertools/powertools-lambda-python/issues/3983))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#3972](https://github.com/aws-powertools/powertools-lambda-python/issues/3972))
+* **deps:** bump squidfunk/mkdocs-material from `3678304` to `6c81a89` in /docs ([#3973](https://github.com/aws-powertools/powertools-lambda-python/issues/3973))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 1 update ([#4001](https://github.com/aws-powertools/powertools-lambda-python/issues/4001))
+* **deps-dev:** bump mkdocs-material from 9.5.13 to 9.5.14 ([#3978](https://github.com/aws-powertools/powertools-lambda-python/issues/3978))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.90 to 0.1.91 ([#3975](https://github.com/aws-powertools/powertools-lambda-python/issues/3975))
+* **deps-dev:** bump types-python-dateutil from 2.9.0.20240315 to 2.9.0.20240316 ([#3977](https://github.com/aws-powertools/powertools-lambda-python/issues/3977))
+* **deps-dev:** bump aws-cdk-aws-lambda-python-alpha from 2.132.1a0 to 2.133.0a0 ([#3976](https://github.com/aws-powertools/powertools-lambda-python/issues/3976))
+* **deps-dev:** bump the boto-typing group with 2 updates ([#3982](https://github.com/aws-powertools/powertools-lambda-python/issues/3982))
+* **deps-dev:** bump the boto-typing group with 2 updates ([#3974](https://github.com/aws-powertools/powertools-lambda-python/issues/3974))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.91 to 0.1.94 ([#3985](https://github.com/aws-powertools/powertools-lambda-python/issues/3985))
+* **deps-dev:** bump ruff from 0.3.2 to 0.3.3 ([#3967](https://github.com/aws-powertools/powertools-lambda-python/issues/3967))
+* **deps-dev:** bump pytest-asyncio from 0.23.5.post1 to 0.23.6 ([#3984](https://github.com/aws-powertools/powertools-lambda-python/issues/3984))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3991](https://github.com/aws-powertools/powertools-lambda-python/issues/3991))
+* **deps-dev:** bump black from 24.2.0 to 24.3.0 ([#3968](https://github.com/aws-powertools/powertools-lambda-python/issues/3968))
+* **deps-dev:** bump types-python-dateutil from 2.8.19.20240311 to 2.9.0.20240315 ([#3966](https://github.com/aws-powertools/powertools-lambda-python/issues/3966))
+* **deps-dev:** bump aws-cdk from 2.132.1 to 2.133.0 ([#3963](https://github.com/aws-powertools/powertools-lambda-python/issues/3963))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3964](https://github.com/aws-powertools/powertools-lambda-python/issues/3964))
+* **deps-dev:** bump sentry-sdk from 1.42.0 to 1.43.0 ([#3992](https://github.com/aws-powertools/powertools-lambda-python/issues/3992))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.99 to 0.1.101 ([#4015](https://github.com/aws-powertools/powertools-lambda-python/issues/4015))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.89 to 0.1.90 ([#3957](https://github.com/aws-powertools/powertools-lambda-python/issues/3957))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3956](https://github.com/aws-powertools/powertools-lambda-python/issues/3956))
+* **deps-dev:** bump aws-cdk-lib from 2.132.1 to 2.133.0 ([#3965](https://github.com/aws-powertools/powertools-lambda-python/issues/3965))
+* **deps-dev:** bump coverage from 7.4.3 to 7.4.4 ([#3959](https://github.com/aws-powertools/powertools-lambda-python/issues/3959))
+* **deps-dev:** bump ruff from 0.3.3 to 0.3.4 ([#3996](https://github.com/aws-powertools/powertools-lambda-python/issues/3996))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.88 to 0.1.89 ([#3952](https://github.com/aws-powertools/powertools-lambda-python/issues/3952))
+* **deps-dev:** bump sentry-sdk from 1.41.0 to 1.42.0 ([#3951](https://github.com/aws-powertools/powertools-lambda-python/issues/3951))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3950](https://github.com/aws-powertools/powertools-lambda-python/issues/3950))
+* **deps-dev:** bump pytest-mock from 3.12.0 to 3.13.0 ([#3999](https://github.com/aws-powertools/powertools-lambda-python/issues/3999))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.94 to 0.1.96 ([#4002](https://github.com/aws-powertools/powertools-lambda-python/issues/4002))
+* **deps-dev:** bump the boto-typing group with 2 updates ([#3940](https://github.com/aws-powertools/powertools-lambda-python/issues/3940))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.87 to 0.1.88 ([#3942](https://github.com/aws-powertools/powertools-lambda-python/issues/3942))
+* **deps-dev:** bump pytest from 8.0.2 to 8.1.1 ([#3943](https://github.com/aws-powertools/powertools-lambda-python/issues/3943))
+* **deps-dev:** bump aws-cdk-aws-lambda-python-alpha from 2.131.0a0 to 2.132.1a0 ([#3944](https://github.com/aws-powertools/powertools-lambda-python/issues/3944))
+* **deps-dev:** bump cfn-lint from 0.86.0 to 0.86.1 ([#3998](https://github.com/aws-powertools/powertools-lambda-python/issues/3998))
+* **deps-dev:** bump aws-cdk from 2.132.0 to 2.132.1 ([#3938](https://github.com/aws-powertools/powertools-lambda-python/issues/3938))
+* **deps-dev:** bump aws-cdk-lib from 2.131.0 to 2.132.1 ([#3936](https://github.com/aws-powertools/powertools-lambda-python/issues/3936))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.96 to 0.1.99 ([#4008](https://github.com/aws-powertools/powertools-lambda-python/issues/4008))
+* **deps-dev:** bump aws-cdk from 2.131.0 to 2.132.0 ([#3928](https://github.com/aws-powertools/powertools-lambda-python/issues/3928))
+* **deps-dev:** bump types-redis from 4.6.0.20240218 to 4.6.0.20240311 ([#3931](https://github.com/aws-powertools/powertools-lambda-python/issues/3931))
+* **deps-dev:** bump types-python-dateutil from 2.8.19.20240106 to 2.8.19.20240311 ([#3932](https://github.com/aws-powertools/powertools-lambda-python/issues/3932))
+* **deps-dev:** bump pytest-mock from 3.13.0 to 3.14.0 ([#4007](https://github.com/aws-powertools/powertools-lambda-python/issues/4007))
+* **deps-dev:** bump filelock from 3.13.1 to 3.13.3 ([#4014](https://github.com/aws-powertools/powertools-lambda-python/issues/4014))
+* **deps-dev:** bump ruff from 0.3.0 to 0.3.2 ([#3925](https://github.com/aws-powertools/powertools-lambda-python/issues/3925))
+* **deps-dev:** bump mypy from 1.8.0 to 1.9.0 ([#3921](https://github.com/aws-powertools/powertools-lambda-python/issues/3921))
+* **deps-dev:** bump bandit from 1.7.7 to 1.7.8 ([#3920](https://github.com/aws-powertools/powertools-lambda-python/issues/3920))
+* **deps-dev:** bump pytest-cov from 4.1.0 to 5.0.0 ([#4013](https://github.com/aws-powertools/powertools-lambda-python/issues/4013))
+* **deps-dev:** bump pytest-asyncio from 0.23.5 to 0.23.5.post1 ([#3923](https://github.com/aws-powertools/powertools-lambda-python/issues/3923))
+* **deps-dev:** bump mkdocs-material from 9.5.14 to 9.5.15 ([#4016](https://github.com/aws-powertools/powertools-lambda-python/issues/4016))
+* **deps-dev:** bump the boto-typing group with 2 updates ([#3919](https://github.com/aws-powertools/powertools-lambda-python/issues/3919))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.101 to 0.1.104 ([#4020](https://github.com/aws-powertools/powertools-lambda-python/issues/4020))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.83 to 0.1.87 ([#3930](https://github.com/aws-powertools/powertools-lambda-python/issues/3930))
+
+
+
+## [v2.35.1] - 2024-03-08
+## Bug Fixes
+
+* **data_sources:** ensure correct types on SQSMessageAttributes ([#3898](https://github.com/aws-powertools/powertools-lambda-python/issues/3898))
+* **event_handler:** validate POST bodies on BedrockAgentResolver ([#3903](https://github.com/aws-powertools/powertools-lambda-python/issues/3903))
* **internal:** call ruff with correct args ([#3901](https://github.com/aws-powertools/powertools-lambda-python/issues/3901))
+## Features
+
+* **event_handler:** use custom serializer during openapi serialization ([#3900](https://github.com/aws-powertools/powertools-lambda-python/issues/3900))
+
## Maintenance
+* version bump
+* **deps:** bump aws-xray-sdk from 2.12.1 to 2.13.0 ([#3906](https://github.com/aws-powertools/powertools-lambda-python/issues/3906))
+* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#3911](https://github.com/aws-powertools/powertools-lambda-python/issues/3911))
* **deps:** bump squidfunk/mkdocs-material from `7be068b` to `3678304` in /docs ([#3894](https://github.com/aws-powertools/powertools-lambda-python/issues/3894))
-* **deps-dev:** bump mkdocs-material from 9.5.12 to 9.5.13 ([#3895](https://github.com/aws-powertools/powertools-lambda-python/issues/3895))
+* **deps:** bump datadog-lambda from 5.88.0 to 5.89.0 ([#3907](https://github.com/aws-powertools/powertools-lambda-python/issues/3907))
* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.81 to 0.1.82 ([#3896](https://github.com/aws-powertools/powertools-lambda-python/issues/3896))
+* **deps-dev:** bump sentry-sdk from 1.40.6 to 1.41.0 ([#3905](https://github.com/aws-powertools/powertools-lambda-python/issues/3905))
+* **deps-dev:** bump mkdocs-material from 9.5.12 to 9.5.13 ([#3895](https://github.com/aws-powertools/powertools-lambda-python/issues/3895))
+* **deps-dev:** bump cdklabs-generative-ai-cdk-constructs from 0.1.82 to 0.1.83 ([#3908](https://github.com/aws-powertools/powertools-lambda-python/issues/3908))
+* **deps-dev:** bump the boto-typing group with 1 update ([#3904](https://github.com/aws-powertools/powertools-lambda-python/issues/3904))
@@ -4504,7 +4613,8 @@
* Merge pull request [#5](https://github.com/aws-powertools/powertools-lambda-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.35.0...HEAD
+[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.35.1...HEAD
+[v2.35.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.35.0...v2.35.1
[v2.35.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.34.2...v2.35.0
[v2.34.2]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.34.1...v2.34.2
[v2.34.1]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.34.0...v2.34.1
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py
index e72d39ba821..29601247b48 100644
--- a/aws_lambda_powertools/event_handler/api_gateway.py
+++ b/aws_lambda_powertools/event_handler/api_gateway.py
@@ -503,6 +503,18 @@ def _get_openapi_path(
if request_body_oai:
operation["requestBody"] = request_body_oai
+ # Validation failure response (422) will always be part of the schema
+ operation_responses: Dict[int, OpenAPIResponse] = {
+ 422: {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": COMPONENT_REF_PREFIX + "HTTPValidationError"},
+ },
+ },
+ },
+ }
+
# Add the response to the OpenAPI operation
if self.responses:
for status_code in list(self.responses):
@@ -549,45 +561,34 @@ def _get_openapi_path(
response["content"][content_type] = new_payload
- operation["responses"] = self.responses
+ # Merge the user provided response with the default responses
+ operation_responses[status_code] = response
else:
# Set the default 200 response
- responses = operation.setdefault("responses", {})
- success_response = responses.setdefault(200, {})
- success_response["description"] = self.response_description or _DEFAULT_OPENAPI_RESPONSE_DESCRIPTION
- success_response["content"] = {"application/json": {"schema": {}}}
- json_response = success_response["content"].setdefault("application/json", {})
-
- # Add the response schema to the OpenAPI 200 response
- json_response.update(
- self._openapi_operation_return(
- param=dependant.return_param,
- model_name_map=model_name_map,
- field_mapping=field_mapping,
- ),
+ response_schema = self._openapi_operation_return(
+ param=dependant.return_param,
+ model_name_map=model_name_map,
+ field_mapping=field_mapping,
)
- # Add validation failure response (422)
- operation["responses"][422] = {
- "description": "Validation Error",
- "content": {
- "application/json": {
- "schema": {"$ref": COMPONENT_REF_PREFIX + "HTTPValidationError"},
- },
- },
+ # Add the response schema to the OpenAPI 200 response
+ operation_responses[200] = {
+ "description": self.response_description or _DEFAULT_OPENAPI_RESPONSE_DESCRIPTION,
+ "content": {"application/json": response_schema},
}
- # Add the validation error schema to the definitions, but only if it hasn't been added yet
- if "ValidationError" not in definitions:
- definitions.update(
- {
- "ValidationError": validation_error_definition,
- "HTTPValidationError": validation_error_response_definition,
- },
- )
-
+ operation["responses"] = operation_responses
path[self.method.lower()] = operation
+ # Add the validation error schema to the definitions, but only if it hasn't been added yet
+ if "ValidationError" not in definitions:
+ definitions.update(
+ {
+ "ValidationError": validation_error_definition,
+ "HTTPValidationError": validation_error_response_definition,
+ },
+ )
+
# Generate the response schema
return path, definitions
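
The net effect of this hunk: the 422 validation-error response is built first and any user-declared responses are merged on top, so 422 is always present in the schema. A minimal sketch of how this surfaces to callers, assuming the resolver's `enable_validation` flag and `get_openapi_json_schema()` helper:

```python
import json

from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver(enable_validation=True)

@app.get("/todos")
def get_todos() -> dict:
    return {"ok": True}

# Every operation now carries the 422 validation-error response,
# even when no custom responses were declared on the route.
schema = json.loads(app.get_openapi_json_schema())
print(sorted(schema["paths"]["/todos"]["get"]["responses"]))  # expect ['200', '422']
```
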
@@ -1623,6 +1624,7 @@ def enable_swagger(
license_info: Optional["License"] = None,
swagger_base_url: Optional[str] = None,
middlewares: Optional[List[Callable[..., Response]]] = None,
+ compress: bool = False,
):
"""
Returns the OpenAPI schema as a JSON serializable dict
@@ -1655,11 +1657,13 @@ def enable_swagger(
The base url for the swagger UI. If not provided, we will serve a recent version of the Swagger UI.
middlewares: List[Callable[..., Response]], optional
List of middlewares to be used for the swagger route.
+ compress: bool, default = False
+ Whether or not to enable gzip compression for the swagger route.
"""
from aws_lambda_powertools.event_handler.openapi.compat import model_json
from aws_lambda_powertools.event_handler.openapi.models import Server
- @self.get(path, middlewares=middlewares, include_in_schema=False)
+ @self.get(path, middlewares=middlewares, include_in_schema=False, compress=compress)
def swagger_handler():
base_path = self._get_base_path()
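
For reference, a hedged usage sketch of the new `compress` flag (route path and resolver construction are illustrative):

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver(enable_validation=True)

# Serve the Swagger UI with gzip compression on the generated route
app.enable_swagger(path="/swagger", compress=True)

def lambda_handler(event, context):
    return app.resolve(event, context)
```
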
@@ -2130,6 +2134,9 @@ def include_router(self, router: "Router", prefix: Optional[str] = None) -> None
logger.debug("Appending Router middlewares into App middlewares.")
self._router_middlewares = self._router_middlewares + router._router_middlewares
+ logger.debug("Appending Router exception_handler into App exception_handler.")
+ self._exception_handlers.update(router._exception_handlers)
+
# use pointer to allow context clearance after event is processed e.g., resolve(evt, ctx)
router.context = self.context
@@ -2195,6 +2202,7 @@ def __init__(self):
self._routes_with_middleware: Dict[tuple, List[Callable]] = {}
self.api_resolver: Optional[BaseRouter] = None
self.context = {} # early init as customers might add context before event resolution
+ self._exception_handlers: Dict[Type, Callable] = {}
def route(
self,
@@ -2249,6 +2257,17 @@ def register_route(func: Callable):
return register_route
+ def exception_handler(self, exc_class: Union[Type[Exception], List[Type[Exception]]]):
+ def register_exception_handler(func: Callable):
+ if isinstance(exc_class, list):
+ for exp in exc_class:
+ self._exception_handlers[exp] = func
+ else:
+ self._exception_handlers[exc_class] = func
+ return func
+
+ return register_exception_handler
+
class APIGatewayRestResolver(ApiGatewayResolver):
current_event: APIGatewayProxyEvent
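
With `exception_handler` now available on `Router` and merged by `include_router`, handlers can live next to the routes they protect. A hedged sketch (import paths assumed from the module shown above):

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response
from aws_lambda_powertools.event_handler.api_gateway import Router

router = Router()

# Handlers registered on the router are merged into the app by include_router()
@router.exception_handler(ValueError)
def handle_value_error(exc: ValueError):
    return Response(status_code=400, content_type="text/plain", body=str(exc))

@router.get("/items")
def get_items():
    raise ValueError("bad input")

app = APIGatewayRestResolver()
app.include_router(router)
```
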
diff --git a/aws_lambda_powertools/event_handler/openapi/types.py b/aws_lambda_powertools/event_handler/openapi/types.py
index beafa0e566c..5a99ee76e98 100644
--- a/aws_lambda_powertools/event_handler/openapi/types.py
+++ b/aws_lambda_powertools/event_handler/openapi/types.py
@@ -28,7 +28,7 @@
"type": "array",
"items": {"anyOf": [{"type": "string"}, {"type": "integer"}]},
},
- "msg": {"title": "Message", "type": "string"},
+ # For security reasons, we hide **msg** details (don't leak Python, Pydantic or filenames)
"type": {"title": "Error Type", "type": "string"},
},
"required": ["loc", "msg", "type"],
diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py
index 93e1dd45698..f86833a7851 100644
--- a/aws_lambda_powertools/logging/logger.py
+++ b/aws_lambda_powertools/logging/logger.py
@@ -580,7 +580,7 @@ def debug(
extra=extra,
)
- def append_keys(self, **additional_keys) -> None:
+ def append_keys(self, **additional_keys: object) -> None:
self.registered_formatter.append_keys(**additional_keys)
def remove_keys(self, keys: Iterable[str]) -> None:
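
The annotation only tightens typing; runtime behavior is unchanged. Usage for reference:

```python
from aws_lambda_powertools import Logger

logger = Logger(service="payment")

# additional_keys values are typed as `object`, so any value is accepted
logger.append_keys(order_id="12345", retries=3)
logger.info("order received")
```
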
diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py
index 5e3b9c84733..73b13e33e5c 100644
--- a/aws_lambda_powertools/metrics/base.py
+++ b/aws_lambda_powertools/metrics/base.py
@@ -17,6 +17,7 @@
MetricValueError,
SchemaValidationError,
)
+from aws_lambda_powertools.metrics.functions import convert_timestamp_to_emf_format, validate_emf_timestamp
from aws_lambda_powertools.metrics.provider import cold_start
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.constants import MAX_DIMENSIONS, MAX_METRICS
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit
@@ -76,6 +77,8 @@ def __init__(
self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
self.metadata_set = metadata_set if metadata_set is not None else {}
+ self.timestamp: int | None = None
+
self._metric_units = [unit.value for unit in MetricUnit]
self._metric_unit_valid_options = list(MetricUnit.__members__)
self._metric_resolutions = [resolution.value for resolution in MetricResolution]
@@ -224,7 +227,7 @@ def serialize_metric_set(
return {
"_aws": {
- "Timestamp": int(datetime.datetime.now().timestamp() * 1000), # epoch
+ "Timestamp": self.timestamp or int(datetime.datetime.now().timestamp() * 1000), # epoch
"CloudWatchMetrics": [
{
"Namespace": self.namespace, # "test_namespace"
@@ -296,6 +299,31 @@ def add_metadata(self, key: str, value: Any) -> None:
else:
self.metadata_set[str(key)] = value
+ def set_timestamp(self, timestamp: int | datetime.datetime):
+ """
+ Set the timestamp for the metric.
+
+ Parameters
+ ----------
+ timestamp: int | datetime.datetime
+ The timestamp to use for the metric.
+ If an integer is provided, it is assumed to be the epoch time in milliseconds.
+ If a datetime object is provided, it will be converted to epoch time in milliseconds.
+ """
+ # The timestamp must be a Datetime object or an integer representing an epoch time.
+ # This should not exceed 14 days in the past or be more than 2 hours in the future.
+ # Any metric failing to meet these criteria will be skipped by Amazon CloudWatch.
+ # See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html
+ # See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CloudWatch-Logs-Monitoring-CloudWatch-Metrics.html
+ if not validate_emf_timestamp(timestamp):
+ warnings.warn(
+ "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. "
+ "Ensure the timestamp is no more than 14 days in the past or 2 hours in the future.",
+ stacklevel=2,
+ )
+
+ self.timestamp = convert_timestamp_to_emf_format(timestamp)
+
def clear_metrics(self) -> None:
logger.debug("Clearing out existing metric set from memory")
self.metric_set.clear()
@@ -576,6 +604,9 @@ def single_metric(
Metric value
namespace: str
Namespace for metrics
+ default_dimensions: Dict[str, str], optional
+ Metric dimensions as key=value that will always be present
+
Yields
-------
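
A hedged sketch of the documented `default_dimensions` parameter on `single_metric`:

```python
from aws_lambda_powertools import single_metric
from aws_lambda_powertools.metrics import MetricUnit

# default_dimensions are applied to the metric in addition to any
# dimensions added inside the context manager
with single_metric(
    name="RecordsProcessed",
    unit=MetricUnit.Count,
    value=10,
    namespace="Demo",
    default_dimensions={"environment": "prod"},
) as metric:
    metric.add_dimension(name="tenant", value="acme")
```
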
diff --git a/aws_lambda_powertools/metrics/functions.py b/aws_lambda_powertools/metrics/functions.py
index e259826f1a7..ea8dc3603d1 100644
--- a/aws_lambda_powertools/metrics/functions.py
+++ b/aws_lambda_powertools/metrics/functions.py
@@ -1,10 +1,13 @@
from __future__ import annotations
+from datetime import datetime
+
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import (
MetricResolutionError,
MetricUnitError,
)
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit
+from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.shared.types import List
@@ -69,3 +72,66 @@ def extract_cloudwatch_metric_unit_value(metric_units: List, metric_valid_option
unit = unit.value
return unit
+
+
+def validate_emf_timestamp(timestamp: int | datetime) -> bool:
+ """
+ Validates a given timestamp based on CloudWatch Timestamp guidelines.
+
+ Timestamp must meet CloudWatch requirements; otherwise this function returns False.
+ See [Timestamps](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#about_timestamp)
+ for valid values.
+
+ Parameters
+ ----------
+ timestamp: int | datetime
+ Datetime object or epoch time in milliseconds representing the timestamp to validate.
+
+ Returns
+ -------
+ bool
+ Whether the timestamp is within the valid CloudWatch time window
+ """
+
+ if not isinstance(timestamp, (int, datetime)):
+ return False
+
+ if isinstance(timestamp, datetime):
+ # Converting timestamp to epoch time in milliseconds
+ timestamp = int(timestamp.timestamp() * 1000)
+
+ # Consider current timezone when working with date and time
+ current_timezone = datetime.now().astimezone().tzinfo
+
+ current_time = int(datetime.now(current_timezone).timestamp() * 1000)
+ min_valid_timestamp = current_time - constants.EMF_MAX_TIMESTAMP_PAST_AGE
+ max_valid_timestamp = current_time + constants.EMF_MAX_TIMESTAMP_FUTURE_AGE
+
+ return min_valid_timestamp <= timestamp <= max_valid_timestamp
+
+
+def convert_timestamp_to_emf_format(timestamp: int | datetime) -> int:
+ """
+ Converts a timestamp to EMF compatible format.
+
+ Parameters
+ ----------
+ timestamp: int | datetime
+ The timestamp to convert. If already in epoch milliseconds format, returns it as is.
+ If datetime object, converts it to milliseconds since Unix epoch.
+
+ Returns
+ -------
+ int
+ The timestamp converted to EMF compatible format (milliseconds since Unix epoch).
+ """
+ if isinstance(timestamp, int):
+ return timestamp
+
+ try:
+ return int(round(timestamp.timestamp() * 1000))
+ except AttributeError:
+ # If this point is reached, timestamp is not a datetime object.
+ # Returning zero (the start of the Unix epoch) falls outside the valid window,
+ # so the metric will be skipped by Amazon CloudWatch.
+ return 0
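
A quick check of the two helpers against the 14-days-past / 2-hours-future window:

```python
from datetime import datetime, timedelta

from aws_lambda_powertools.metrics.functions import (
    convert_timestamp_to_emf_format,
    validate_emf_timestamp,
)

now = datetime.now()
print(validate_emf_timestamp(now))                       # True: inside the window
print(validate_emf_timestamp(now - timedelta(days=15)))  # False: older than 14 days
print(convert_timestamp_to_emf_format(now))              # epoch milliseconds as int
```
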
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index 976380ab6a9..05d9010684c 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -125,6 +125,19 @@ def serialize_metric_set(
def add_metadata(self, key: str, value: Any) -> None:
self.provider.add_metadata(key=key, value=value)
+ def set_timestamp(self, timestamp: int | datetime.datetime):
+ """
+ Set the timestamp for the metric.
+
+ Parameters
+ ----------
+ timestamp: int | datetime.datetime
+ The timestamp to use for the metric.
+ If an integer is provided, it is assumed to be the epoch time in milliseconds.
+ If a datetime object is provided, it will be converted to epoch time in milliseconds.
+ """
+ self.provider.set_timestamp(timestamp=timestamp)
+
def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
self.provider.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics)
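
End-to-end, the public API delegates to the provider shown below. A hedged usage sketch:

```python
import datetime

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="Demo", service="orders")
metrics.add_metric(name="OrdersCreated", unit=MetricUnit.Count, value=1)

# Backdate the whole EMF blob; must stay within CloudWatch's accepted window
# (no more than 14 days in the past or 2 hours in the future)
metrics.set_timestamp(datetime.datetime.now() - datetime.timedelta(hours=1))
metrics.flush_metrics()
```
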
diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py
index f5859c5a48d..d59026ebf69 100644
--- a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py
+++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py
@@ -12,8 +12,10 @@
from aws_lambda_powertools.metrics.base import single_metric
from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError
from aws_lambda_powertools.metrics.functions import (
+ convert_timestamp_to_emf_format,
extract_cloudwatch_metric_resolution_value,
extract_cloudwatch_metric_unit_value,
+ validate_emf_timestamp,
)
from aws_lambda_powertools.metrics.provider.base import BaseProvider
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.constants import MAX_DIMENSIONS, MAX_METRICS
@@ -73,6 +75,7 @@ def __init__(
self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
self.metadata_set = metadata_set if metadata_set is not None else {}
+ self.timestamp: int | None = None
self._metric_units = [unit.value for unit in MetricUnit]
self._metric_unit_valid_options = list(MetricUnit.__members__)
@@ -231,7 +234,7 @@ def serialize_metric_set(
return {
"_aws": {
- "Timestamp": int(datetime.datetime.now().timestamp() * 1000), # epoch
+ "Timestamp": self.timestamp or int(datetime.datetime.now().timestamp() * 1000), # epoch
"CloudWatchMetrics": [
{
"Namespace": self.namespace, # "test_namespace"
@@ -304,6 +307,31 @@ def add_metadata(self, key: str, value: Any) -> None:
else:
self.metadata_set[str(key)] = value
+ def set_timestamp(self, timestamp: int | datetime.datetime):
+ """
+ Set the timestamp for the metric.
+
+ Parameters
+ ----------
+ timestamp: int | datetime.datetime
+ The timestamp to use for the metric.
+ If an integer is provided, it is assumed to be the epoch time in milliseconds.
+ If a datetime object is provided, it will be converted to epoch time in milliseconds.
+ """
+ # The timestamp must be a Datetime object or an integer representing an epoch time.
+ # This should not exceed 14 days in the past or be more than 2 hours in the future.
+ # Any metric failing to meet these criteria will be skipped by Amazon CloudWatch.
+ # See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html
+ # See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CloudWatch-Logs-Monitoring-CloudWatch-Metrics.html
+ if not validate_emf_timestamp(timestamp):
+ warnings.warn(
+ "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. "
+ "Ensure the timestamp is no more than 14 days in the past or 2 hours in the future.",
+ stacklevel=2,
+ )
+
+ self.timestamp = convert_timestamp_to_emf_format(timestamp)
+
def clear_metrics(self) -> None:
logger.debug("Clearing out existing metric set from memory")
self.metric_set.clear()
diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py
index 7c247a8dabc..bb8164d1d37 100644
--- a/aws_lambda_powertools/shared/constants.py
+++ b/aws_lambda_powertools/shared/constants.py
@@ -1,32 +1,17 @@
+# Tracer constants
TRACER_CAPTURE_RESPONSE_ENV: str = "POWERTOOLS_TRACER_CAPTURE_RESPONSE"
TRACER_CAPTURE_ERROR_ENV: str = "POWERTOOLS_TRACER_CAPTURE_ERROR"
TRACER_DISABLED_ENV: str = "POWERTOOLS_TRACE_DISABLED"
+XRAY_SDK_MODULE: str = "aws_xray_sdk"
+XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core"
+XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID"
+MIDDLEWARE_FACTORY_TRACE_ENV: str = "POWERTOOLS_TRACE_MIDDLEWARES"
+INVALID_XRAY_NAME_CHARACTERS = r"[?;*()!$~^<>]"
+# Logger constants
LOGGER_LOG_SAMPLING_RATE: str = "POWERTOOLS_LOGGER_SAMPLE_RATE"
LOGGER_LOG_EVENT_ENV: str = "POWERTOOLS_LOGGER_LOG_EVENT"
LOGGER_LOG_DEDUPLICATION_ENV: str = "POWERTOOLS_LOG_DEDUPLICATION_DISABLED"
-
-MIDDLEWARE_FACTORY_TRACE_ENV: str = "POWERTOOLS_TRACE_MIDDLEWARES"
-
-METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE"
-
-DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG"
-
-SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME"
-XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID"
-LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT"
-
-
-LAMBDA_FUNCTION_NAME_ENV: str = "AWS_LAMBDA_FUNCTION_NAME"
-
-XRAY_SDK_MODULE: str = "aws_xray_sdk"
-XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core"
-
-IDEMPOTENCY_DISABLED_ENV: str = "POWERTOOLS_IDEMPOTENCY_DISABLED"
-
-PARAMETERS_SSM_DECRYPT_ENV: str = "POWERTOOLS_PARAMETERS_SSM_DECRYPT"
-PARAMETERS_MAX_AGE_ENV: str = "POWERTOOLS_PARAMETERS_MAX_AGE"
-
LOGGER_LAMBDA_CONTEXT_KEYS = [
"function_arn",
"function_memory_size",
@@ -35,17 +20,6 @@
"cold_start",
"xray_trace_id",
]
-
-# JSON indentation level
-PRETTY_INDENT: int = 4
-COMPACT_INDENT = None
-
-POWERTOOLS_DEV_ENV: str = "POWERTOOLS_DEV"
-POWERTOOLS_DEBUG_ENV: str = "POWERTOOLS_DEBUG"
-POWERTOOLS_LOG_LEVEL_ENV: str = "POWERTOOLS_LOG_LEVEL"
-POWERTOOLS_LOG_LEVEL_LEGACY_ENV: str = "LOG_LEVEL"
-LAMBDA_LOG_LEVEL_ENV: str = "AWS_LAMBDA_LOG_LEVEL"
-
# Mapping of Lambda log levels to Python logging levels
# https://docs.aws.amazon.com/lambda/latest/dg/configuration-logging.html#configuration-logging-log-levels
LAMBDA_ADVANCED_LOGGING_LEVELS = {
@@ -57,3 +31,37 @@
"ERROR": "ERROR",
"FATAL": "CRITICAL",
}
+POWERTOOLS_LOG_LEVEL_ENV: str = "POWERTOOLS_LOG_LEVEL"
+POWERTOOLS_LOG_LEVEL_LEGACY_ENV: str = "LOG_LEVEL"
+LAMBDA_LOG_LEVEL_ENV: str = "AWS_LAMBDA_LOG_LEVEL"
+
+# Metrics constants
+METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE"
+DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG"
+SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME"
+# If the timestamp of log event is more than 2 hours in future, the log event is skipped.
+# If the timestamp of log event is more than 14 days in past, the log event is skipped.
+# See https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AgentReference.html
+EMF_MAX_TIMESTAMP_PAST_AGE = 14 * 24 * 60 * 60 * 1000 # 14 days
+EMF_MAX_TIMESTAMP_FUTURE_AGE = 2 * 60 * 60 * 1000 # 2 hours
+
+# Parameters constants
+PARAMETERS_SSM_DECRYPT_ENV: str = "POWERTOOLS_PARAMETERS_SSM_DECRYPT"
+PARAMETERS_MAX_AGE_ENV: str = "POWERTOOLS_PARAMETERS_MAX_AGE"
+
+# Runtime and environment constants
+LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT"
+SAM_LOCAL_ENV: str = "AWS_SAM_LOCAL"
+CHALICE_LOCAL_ENV: str = "AWS_CHALICE_CLI_MODE"
+LAMBDA_FUNCTION_NAME_ENV: str = "AWS_LAMBDA_FUNCTION_NAME"
+
+# Debug constants
+POWERTOOLS_DEV_ENV: str = "POWERTOOLS_DEV"
+POWERTOOLS_DEBUG_ENV: str = "POWERTOOLS_DEBUG"
+
+# JSON constants
+PRETTY_INDENT: int = 4
+COMPACT_INDENT = None
+
+# Idempotency constants
+IDEMPOTENCY_DISABLED_ENV: str = "POWERTOOLS_IDEMPOTENCY_DISABLED"
diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py
index ee274498260..0f943f36d39 100644
--- a/aws_lambda_powertools/shared/functions.py
+++ b/aws_lambda_powertools/shared/functions.py
@@ -4,6 +4,7 @@
import itertools
import logging
import os
+import re
import warnings
from binascii import Error as BinAsciiError
from pathlib import Path
@@ -275,13 +276,10 @@ def abs_lambda_path(relative_path: str = "") -> str:
If the path is empty, it will return the current working directory.
"""
# Retrieve the LAMBDA_TASK_ROOT environment variable, falling back to the current working directory
- current_working_directory = os.environ.get("LAMBDA_TASK_ROOT", "")
+ current_working_directory = os.environ.get("LAMBDA_TASK_ROOT", "") or str(Path.cwd())
- # If LAMBDA_TASK_ROOT is not set, use the current working directory
- if not current_working_directory:
- current_working_directory = str(Path.cwd())
+ return str(Path(current_working_directory, relative_path))
- # Combine the current working directory and the relative path to get the absolute path
- absolute_path = str(Path(current_working_directory, relative_path))
- return absolute_path
+def sanitize_xray_segment_name(name: str) -> str:
+ return re.sub(constants.INVALID_XRAY_NAME_CHARACTERS, "", name)
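
The sanitizer strips the characters X-Ray rejects in segment names (`?;*()!$~^<>`), which matters for qualified names of nested functions:

```python
from aws_lambda_powertools.shared.functions import sanitize_xray_segment_name

# "<" and ">" from "<locals>" are removed before the subsegment is created
print(sanitize_xray_segment_name("app.handler.<locals>.fn"))  # app.handler.locals.fn
```
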
diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py
index 30b96909f3e..4b1169ffb2e 100644
--- a/aws_lambda_powertools/shared/version.py
+++ b/aws_lambda_powertools/shared/version.py
@@ -1,3 +1,3 @@
"""Exposes version constant to avoid circular dependencies."""
-VERSION = "2.35.0"
+VERSION = "2.36.0"
diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py
index b30467188f9..a79ac4ec738 100644
--- a/aws_lambda_powertools/tracing/tracer.py
+++ b/aws_lambda_powertools/tracing/tracer.py
@@ -7,11 +7,15 @@
import os
from typing import Any, Callable, Dict, List, Optional, Sequence, Union, cast, overload
-from ..shared import constants
-from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice
-from ..shared.lazy_import import LazyLoader
-from ..shared.types import AnyCallableT
-from .base import BaseProvider, BaseSegment
+from aws_lambda_powertools.shared import constants
+from aws_lambda_powertools.shared.functions import (
+ resolve_env_var_choice,
+ resolve_truthy_env_var_choice,
+ sanitize_xray_segment_name,
+)
+from aws_lambda_powertools.shared.lazy_import import LazyLoader
+from aws_lambda_powertools.shared.types import AnyCallableT
+from aws_lambda_powertools.tracing.base import BaseProvider, BaseSegment
is_cold_start = True
logger = logging.getLogger(__name__)
@@ -520,7 +524,8 @@ async def async_tasks():
)
# Example: app.ClassA.get_all # noqa ERA001
- method_name = f"{method.__module__}.{method.__qualname__}"
+ # Valid characters can be found at http://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html
+ method_name = sanitize_xray_segment_name(f"{method.__module__}.{method.__qualname__}")
capture_response = resolve_truthy_env_var_choice(
env=os.getenv(constants.TRACER_CAPTURE_RESPONSE_ENV, "true"),
@@ -766,13 +771,15 @@ def _is_tracer_disabled() -> Union[bool, str]:
"""
logger.debug("Verifying whether Tracing has been disabled")
is_lambda_env = os.getenv(constants.LAMBDA_TASK_ROOT_ENV)
+ is_lambda_sam_cli = os.getenv(constants.SAM_LOCAL_ENV)
+ is_chalice_cli = os.getenv(constants.CHALICE_LOCAL_ENV)
is_disabled = resolve_truthy_env_var_choice(env=os.getenv(constants.TRACER_DISABLED_ENV, "false"))
if is_disabled:
logger.debug("Tracing has been disabled via env var POWERTOOLS_TRACE_DISABLED")
return is_disabled
- if not is_lambda_env:
+ if not is_lambda_env or (is_lambda_sam_cli or is_chalice_cli):
logger.debug("Running outside Lambda env; disabling Tracing")
return True
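
A hedged illustration of the new behavior: AWS SAM CLI sets `AWS_SAM_LOCAL=true` during `sam local invoke`, so the Tracer now treats that as a non-Lambda environment and disables itself (Chalice's `AWS_CHALICE_CLI_MODE` works the same way):

```python
import os

# Simulate running under `sam local invoke`
os.environ["AWS_SAM_LOCAL"] = "true"

from aws_lambda_powertools import Tracer

tracer = Tracer(service="demo")  # tracing is a no-op under SAM local / Chalice CLI
```
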
diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py
index a3eefbb9cea..3f4075c7d2f 100644
--- a/aws_lambda_powertools/utilities/batch/exceptions.py
+++ b/aws_lambda_powertools/utilities/batch/exceptions.py
@@ -36,3 +36,19 @@ def __init__(self, msg="", child_exceptions: List[ExceptionInfo] | None = None):
def __str__(self):
parent_exception_str = super(BatchProcessingError, self).__str__()
return self.format_exceptions(parent_exception_str)
+
+
+class SQSFifoCircuitBreakerError(Exception):
+ """
+ Signals a record not processed due to the SQS FIFO processing being interrupted
+ """
+
+ pass
+
+
+class SQSFifoMessageGroupCircuitBreakerError(Exception):
+ """
+ Signals a record not processed due to the SQS FIFO message group processing being interrupted
+ """
+
+ pass
diff --git a/aws_lambda_powertools/utilities/batch/sqs_fifo_partial_processor.py b/aws_lambda_powertools/utilities/batch/sqs_fifo_partial_processor.py
index d48749a137e..e54389718bc 100644
--- a/aws_lambda_powertools/utilities/batch/sqs_fifo_partial_processor.py
+++ b/aws_lambda_powertools/utilities/batch/sqs_fifo_partial_processor.py
@@ -1,15 +1,14 @@
-from typing import List, Optional, Tuple
-
-from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType
+import logging
+from typing import Optional, Set
+
+from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, ExceptionInfo, FailureResponse
+from aws_lambda_powertools.utilities.batch.exceptions import (
+ SQSFifoCircuitBreakerError,
+ SQSFifoMessageGroupCircuitBreakerError,
+)
from aws_lambda_powertools.utilities.batch.types import BatchSqsTypeModel
-
-class SQSFifoCircuitBreakerError(Exception):
- """
- Signals a record not processed due to the SQS FIFO processing being interrupted
- """
-
- pass
+logger = logging.getLogger(__name__)
class SqsFifoPartialProcessor(BatchProcessor):
@@ -57,36 +56,59 @@ def lambda_handler(event, context: LambdaContext):
None,
)
- def __init__(self, model: Optional["BatchSqsTypeModel"] = None):
- super().__init__(EventType.SQS, model)
+ group_circuit_breaker_exc = (
+ SQSFifoMessageGroupCircuitBreakerError,
+ SQSFifoMessageGroupCircuitBreakerError("A previous record from this message group failed processing"),
+ None,
+ )
- def process(self) -> List[Tuple]:
+ def __init__(self, model: Optional["BatchSqsTypeModel"] = None, skip_group_on_error: bool = False):
"""
- Call instance's handler for each record. When the first failed message is detected,
- the process is short-circuited, and the remaining messages are reported as failed items.
+ Initialize the SqsFifoPartialProcessor.
+
+ Parameters
+ ----------
+ model: Optional["BatchSqsTypeModel"]
+ An optional model for batch processing.
+ skip_group_on_error: bool
+ Determines whether to skip only the messages from a MessageGroupID that encountered processing failures.
+ Default is False.
+
"""
- result: List[Tuple] = []
+ self._skip_group_on_error: bool = skip_group_on_error
+ self._current_group_id = None
+ self._failed_group_ids: Set[str] = set()
+ super().__init__(EventType.SQS, model)
- for i, record in enumerate(self.records):
- # If we have failed messages, it means that the last message failed.
- # We then short circuit the process, failing the remaining messages
- if self.fail_messages:
- return self._short_circuit_processing(i, result)
+ def _process_record(self, record):
+ self._current_group_id = record.get("attributes", {}).get("MessageGroupId")
- # Otherwise, process the message normally
- result.append(self._process_record(record))
+ # Short-circuits the process if:
+ # - There are failed messages, OR
+ # - The `skip_group_on_error` option is on, and the current message is part of a failed group.
+ fail_entire_batch = bool(self.fail_messages) and not self._skip_group_on_error
+ fail_group_id = self._skip_group_on_error and self._current_group_id in self._failed_group_ids
+ if fail_entire_batch or fail_group_id:
+ return self.failure_handler(
+ record=self._to_batch_type(record, event_type=self.event_type, model=self.model),
+ exception=self.group_circuit_breaker_exc if self._skip_group_on_error else self.circuit_breaker_exc,
+ )
- return result
+ return super()._process_record(record)
- def _short_circuit_processing(self, first_failure_index: int, result: List[Tuple]) -> List[Tuple]:
- """
- Starting from the first failure index, fail all the remaining messages, and append them to the result list.
- """
- remaining_records = self.records[first_failure_index:]
- for remaining_record in remaining_records:
- data = self._to_batch_type(record=remaining_record, event_type=self.event_type, model=self.model)
- result.append(self.failure_handler(record=data, exception=self.circuit_breaker_exc))
- return result
+ def failure_handler(self, record, exception: ExceptionInfo) -> FailureResponse:
+ # If we are failing a message and the `skip_group_on_error` is on, we store the failed group ID
+ # This way, future messages with the same group ID will be failed automatically.
+ if self._skip_group_on_error and self._current_group_id:
+ self._failed_group_ids.add(self._current_group_id)
+
+ return super().failure_handler(record, exception)
+
+ def _clean(self):
+ self._failed_group_ids.clear()
+ self._current_group_id = None
+
+ super()._clean()
async def _async_process_record(self, record: dict):
raise NotImplementedError()
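A usage sketch for the new `skip_group_on_error` flag; the handler body and payload shape are illustrative:

```python
from aws_lambda_powertools.utilities.batch import (
    SqsFifoPartialProcessor,
    process_partial_response,
)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord

# Fail only the remaining records that share a MessageGroupId with a failed
# record, rather than short-circuiting the entire batch.
processor = SqsFifoPartialProcessor(skip_group_on_error=True)


def record_handler(record: SQSRecord):
    return record.json_body  # raising here marks the record's group as failed


def lambda_handler(event, context):
    return process_partial_response(
        event=event,
        record_handler=record_handler,
        processor=processor,
        context=context,
    )
```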
diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py
index 38274f0bab4..64416e3cdd9 100644
--- a/aws_lambda_powertools/utilities/data_classes/__init__.py
+++ b/aws_lambda_powertools/utilities/data_classes/__init__.py
@@ -7,6 +7,14 @@
from .appsync_resolver_event import AppSyncResolverEvent
from .aws_config_rule_event import AWSConfigRuleEvent
from .bedrock_agent_event import BedrockAgentEvent
+from .cloud_watch_alarm_event import (
+ CloudWatchAlarmConfiguration,
+ CloudWatchAlarmData,
+ CloudWatchAlarmEvent,
+ CloudWatchAlarmMetric,
+ CloudWatchAlarmMetricStat,
+ CloudWatchAlarmState,
+)
from .cloud_watch_custom_widget_event import CloudWatchDashboardCustomWidgetEvent
from .cloud_watch_logs_event import CloudWatchLogsEvent
from .code_pipeline_job_event import CodePipelineJobEvent
@@ -42,6 +50,12 @@
"AppSyncResolverEvent",
"ALBEvent",
"BedrockAgentEvent",
+ "CloudWatchAlarmData",
+ "CloudWatchAlarmEvent",
+ "CloudWatchAlarmMetric",
+ "CloudWatchAlarmState",
+ "CloudWatchAlarmConfiguration",
+ "CloudWatchAlarmMetricStat",
"CloudWatchDashboardCustomWidgetEvent",
"CloudWatchLogsEvent",
"CodePipelineJobEvent",
diff --git a/aws_lambda_powertools/utilities/data_classes/cloud_watch_alarm_event.py b/aws_lambda_powertools/utilities/data_classes/cloud_watch_alarm_event.py
new file mode 100644
index 00000000000..d085228cb37
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_classes/cloud_watch_alarm_event.py
@@ -0,0 +1,243 @@
+from __future__ import annotations
+
+from functools import cached_property
+from typing import Any, Dict, List, Literal, Optional
+
+from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
+
+
+class CloudWatchAlarmState(DictWrapper):
+ @property
+ def value(self) -> Literal["OK", "ALARM", "INSUFFICIENT_DATA"]:
+ """
+ Overall state of the alarm.
+ """
+ return self["value"]
+
+ @property
+ def reason(self) -> str:
+ """
+ Reason why alarm was changed to this state.
+ """
+ return self["reason"]
+
+ @property
+ def reason_data(self) -> str:
+ """
+ Additional data to back up the reason, usually contains the evaluated data points,
+ the calculated threshold and timestamps.
+ """
+ return self["reasonData"]
+
+ @cached_property
+ def reason_data_decoded(self) -> Optional[Any]:
+ """
+ Deserialized version of reason_data.
+ """
+
+ return self._json_deserializer(self.reason_data) if self.reason_data else None
+
+ @property
+ def actions_suppressed_by(self) -> Optional[Literal["Alarm", "ExtensionPeriod", "WaitPeriod"]]:
+ """
+ Describes why the actions are suppressed when the state value is `ALARM`, for a
+ composite alarm.
+ """
+ return self.get("actionsSuppressedBy", None)
+
+ @property
+ def actions_suppressed_reason(self) -> Optional[str]:
+ """
+ Captures the reason for action suppression.
+ """
+ return self.get("actionsSuppressedReason", None)
+
+ @property
+ def timestamp(self) -> str:
+ """
+ Timestamp of this state change in ISO-8601 format.
+ """
+ return self["timestamp"]
+
+
+class CloudWatchAlarmMetric(DictWrapper):
+ @property
+ def metric_id(self) -> str:
+ """
+ Unique ID of the alarm metric.
+ """
+ return self["id"]
+
+ @property
+ def expression(self) -> Optional[str]:
+ """
+ Optional expression of the alarm metric.
+ """
+ return self.get("expression", None)
+
+ @property
+ def label(self) -> Optional[str]:
+ """
+ Optional label of the alarm metric.
+ """
+ return self.get("label", None)
+
+ @property
+ def return_data(self) -> bool:
+ """
+ Whether this metric data is used to determine the state of the alarm or not.
+ """
+ return self["returnData"]
+
+ @property
+ def metric_stat(self) -> CloudWatchAlarmMetricStat:
+ return CloudWatchAlarmMetricStat(self["metricStat"])
+
+
+class CloudWatchAlarmMetricStat(DictWrapper):
+ @property
+ def period(self) -> Optional[int]:
+ """
+ Metric evaluation period, in seconds.
+ """
+ return self.get("period", None)
+
+ @property
+ def stat(self) -> Optional[str]:
+ """
+ Statistical aggregation of metric points, e.g. Average, SampleCount, etc.
+ """
+ return self.get("stat", None)
+
+ @property
+ def unit(self) -> Optional[str]:
+ """
+ Unit for metric.
+ """
+ return self.get("unit", None)
+
+ @property
+ def metric(self) -> Optional[Dict]:
+ """
+ Metric details
+ """
+ return self.get("metric", {})
+
+
+class CloudWatchAlarmData(DictWrapper):
+ @property
+ def alarm_name(self) -> str:
+ """
+ Alarm name.
+ """
+ return self["alarmName"]
+
+ @property
+ def state(self) -> CloudWatchAlarmState:
+ """
+ The current state of the Alarm.
+ """
+ return CloudWatchAlarmState(self["state"])
+
+ @property
+ def previous_state(self) -> CloudWatchAlarmState:
+ """
+ The previous state of the Alarm.
+ """
+ return CloudWatchAlarmState(self["previousState"])
+
+ @property
+ def configuration(self) -> CloudWatchAlarmConfiguration:
+ """
+ The configuration of the Alarm.
+ """
+ return CloudWatchAlarmConfiguration(self["configuration"])
+
+
+class CloudWatchAlarmConfiguration(DictWrapper):
+ @property
+ def description(self) -> Optional[str]:
+ """
+ Optional description for the Alarm.
+ """
+ return self.get("description", None)
+
+ @property
+ def alarm_rule(self) -> Optional[str]:
+ """
+ Optional description for the Alarm rule in case of a composite alarm.
+ """
+ return self.get("alarmRule", None)
+
+ @property
+ def alarm_actions_suppressor(self) -> Optional[str]:
+ """
+ Optional action suppression for the Alarm rule in case of a composite alarm.
+ """
+ return self.get("actionsSuppressor", None)
+
+ @property
+ def alarm_actions_suppressor_wait_period(self) -> Optional[str]:
+ """
+ Optional action suppression wait period for the Alarm rule in case of a composite alarm.
+ """
+ return self.get("actionsSuppressorWaitPeriod", None)
+
+ @property
+ def alarm_actions_suppressor_extension_period(self) -> Optional[str]:
+ """
+ Optional action suppression extension period for the Alarm rule in case of a composite alarm.
+ """
+ return self.get("actionsSuppressorExtensionPeriod", None)
+
+ @property
+ def metrics(self) -> Optional[List[CloudWatchAlarmMetric]]:
+ """
+ The metrics evaluated for the Alarm.
+ """
+ metrics = self.get("metrics")
+ return [CloudWatchAlarmMetric(i) for i in metrics] if metrics else None
+
+
+class CloudWatchAlarmEvent(DictWrapper):
+ @property
+ def source(self) -> Literal["aws.cloudwatch"]:
+ """
+ Source of the triggered event.
+ """
+ return self["source"]
+
+ @property
+ def alarm_arn(self) -> str:
+ """
+ The ARN of the CloudWatch Alarm.
+ """
+ return self["alarmArn"]
+
+ @property
+ def region(self) -> str:
+ """
+ The AWS region in which the Alarm is active.
+ """
+ return self["region"]
+
+ @property
+ def source_account_id(self) -> str:
+ """
+ The AWS Account ID that the Alarm is deployed to.
+ """
+ return self["accountId"]
+
+ @property
+ def timestamp(self) -> str:
+ """
+ Alarm state change event timestamp in ISO-8601 format.
+ """
+ return self["time"]
+
+ @property
+ def alarm_data(self) -> CloudWatchAlarmData:
+ """
+ Contains basic data about the Alarm and its current and previous states.
+ """
+ return CloudWatchAlarmData(self["alarmData"])
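A minimal handler sketch for the new data class, wired up through the existing `event_source` decorator:

```python
from aws_lambda_powertools.utilities.data_classes import (
    CloudWatchAlarmEvent,
    event_source,
)
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_source(data_class=CloudWatchAlarmEvent)
def lambda_handler(event: CloudWatchAlarmEvent, context: LambdaContext) -> dict:
    # React only to transitions into ALARM
    if event.alarm_data.state.value == "ALARM":
        return {
            "alarm": event.alarm_data.alarm_name,
            "previous_state": event.alarm_data.previous_state.value,
            "reason": event.alarm_data.state.reason,
        }
    return {"alarm": event.alarm_data.alarm_name, "state": event.alarm_data.state.value}
```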
diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py
index 104993deae8..ffca15cc318 100644
--- a/aws_lambda_powertools/utilities/data_classes/common.py
+++ b/aws_lambda_powertools/utilities/data_classes/common.py
@@ -150,13 +150,16 @@ def body(self) -> Optional[str]:
@cached_property
def json_body(self) -> Any:
"""Parses the submitted body as json"""
- return self._json_deserializer(self.decoded_body)
+ if self.decoded_body:
+ return self._json_deserializer(self.decoded_body)
+
+ return None
@cached_property
- def decoded_body(self) -> str:
- """Dynamically base64 decode body as a str"""
- body: str = self["body"]
- if self.is_base64_encoded:
+ def decoded_body(self) -> Optional[str]:
+ """Decode the body from base64 if encoded, otherwise return it as is."""
+ body: Optional[str] = self.body
+ if self.is_base64_encoded and body:
return base64.b64decode(body.encode()).decode()
return body
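The practical effect of this change, sketched against a minimal API Gateway payload; previously `json_body` would hand an absent body straight to the JSON deserializer:

```python
from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent

# Minimal event with no body, e.g. a plain GET request
event = APIGatewayProxyEvent({"body": None, "isBase64Encoded": False})

print(event.decoded_body)  # None, returned as-is instead of failing
print(event.json_body)     # None, JSON parsing is skipped for empty bodies
```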
diff --git a/aws_lambda_powertools/utilities/parameters/__init__.py b/aws_lambda_powertools/utilities/parameters/__init__.py
index 9fcaa4fa701..9f8827ed9b6 100644
--- a/aws_lambda_powertools/utilities/parameters/__init__.py
+++ b/aws_lambda_powertools/utilities/parameters/__init__.py
@@ -8,8 +8,8 @@
from .base import BaseProvider, clear_caches
from .dynamodb import DynamoDBProvider
from .exceptions import GetParameterError, TransformParameterError
-from .secrets import SecretsProvider, get_secret
-from .ssm import SSMProvider, get_parameter, get_parameters, get_parameters_by_name
+from .secrets import SecretsProvider, get_secret, set_secret
+from .ssm import SSMProvider, get_parameter, get_parameters, get_parameters_by_name, set_parameter
__all__ = [
"AppConfigProvider",
@@ -21,8 +21,10 @@
"TransformParameterError",
"get_app_config",
"get_parameter",
+ "set_parameter",
"get_parameters",
"get_parameters_by_name",
"get_secret",
+ "set_secret",
"clear_caches",
]
diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py
index 5ce06589613..2317ebc82d9 100644
--- a/aws_lambda_powertools/utilities/parameters/base.py
+++ b/aws_lambda_powertools/utilities/parameters/base.py
@@ -154,6 +154,12 @@ def _get(self, name: str, **sdk_options) -> Union[str, bytes, Dict[str, Any]]:
"""
raise NotImplementedError()
+ def set(self, name: str, value: Any, *, overwrite: bool = False, **kwargs):
+ """
+ Set a parameter value in the underlying parameter store
+ """
+ raise NotImplementedError()
+
def get_multiple(
self,
path: str,
diff --git a/aws_lambda_powertools/utilities/parameters/exceptions.py b/aws_lambda_powertools/utilities/parameters/exceptions.py
index 1287568b463..6a9554bf142 100644
--- a/aws_lambda_powertools/utilities/parameters/exceptions.py
+++ b/aws_lambda_powertools/utilities/parameters/exceptions.py
@@ -9,3 +9,11 @@ class GetParameterError(Exception):
class TransformParameterError(Exception):
"""When a provider fails to transform a parameter value"""
+
+
+class SetParameterError(Exception):
+ """When a provider raises an exception on writing an SSM parameter"""
+
+
+class SetSecretError(Exception):
+ """When a provider raises an exception on writing a secret"""
diff --git a/aws_lambda_powertools/utilities/parameters/secrets.py b/aws_lambda_powertools/utilities/parameters/secrets.py
index beb4bb80846..0494c64985a 100644
--- a/aws_lambda_powertools/utilities/parameters/secrets.py
+++ b/aws_lambda_powertools/utilities/parameters/secrets.py
@@ -2,8 +2,12 @@
AWS Secrets Manager parameter retrieval and caching utility
"""
+from __future__ import annotations
+
+import json
+import logging
import os
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, overload
import boto3
from botocore.config import Config
@@ -13,8 +17,12 @@
from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.shared.functions import resolve_max_age
+from aws_lambda_powertools.shared.json_encoder import Encoder
+from aws_lambda_powertools.utilities.parameters.base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider
+from aws_lambda_powertools.utilities.parameters.exceptions import SetSecretError
+from aws_lambda_powertools.utilities.parameters.types import SetSecretResponse, TransformOptions
-from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider
+logger = logging.getLogger(__name__)
class SecretsProvider(BaseProvider):
@@ -115,10 +123,178 @@ def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
"""
raise NotImplementedError()
+ def _create_secret(self, name: str, **sdk_options):
+ """
+ Create a secret with the given name.
+
+ Parameters
+ ----------
+ name: str
+ The name of the secret.
+ **sdk_options:
+ Additional options to be passed to the create_secret method.
+
+ Raises
+ ------
+ SetSecretError
+ If there is an error setting the secret.
+ """
+ try:
+ sdk_options["Name"] = name
+ return self.client.create_secret(**sdk_options)
+ except Exception as exc:
+ raise SetSecretError(f"Error setting secret - {str(exc)}") from exc
+
+ def _update_secret(self, name: str, **sdk_options):
+ """
+ Update a secret with the given name.
+
+ Parameters
+ ----------
+ name: str
+ The name of the secret.
+ **sdk_options:
+ Additional options to be passed to the put_secret_value method.
+ """
+ sdk_options["SecretId"] = name
+ return self.client.put_secret_value(**sdk_options)
+
+ def set(
+ self,
+ name: str,
+ value: Union[str, dict, bytes],
+ *, # force keyword arguments
+ client_request_token: Optional[str] = None,
+ **sdk_options,
+ ) -> SetSecretResponse:
+ """
+ Modify the details of a secret or create a new secret if it doesn't already exist.
+
+ We aim to minimize API calls by assuming that the secret already exists and needs updating.
+ If it doesn't exist, we attempt to create a new one. Refer to the following workflow for a better understanding:
+
+
+ ┌────────────────────────┐ ┌─────────────────┐
+ ┌───────▶│Resource NotFound error?│────▶│Create Secret API│─────┐
+ │ └────────────────────────┘ └─────────────────┘ │
+ │ │
+ │ │
+ │ ▼
+ ┌─────────────────┐ ┌─────────────────────┐
+ │Update Secret API│────────────────────────────────────────────▶│ Return or Exception │
+ └─────────────────┘ └─────────────────────┘
+
+ Parameters
+ ----------
+ name: str
+ The ARN or name of the secret to add a new version to or create a new one.
+ value: str, dict or bytes
+ Specifies text data that you want to encrypt and store in this new version of the secret.
+ client_request_token: str, optional
+ This value helps ensure idempotency. It's recommended that you generate
+ a UUID-type value to ensure uniqueness within the specified secret.
+ This value becomes the VersionId of the new version. This field is
+ auto-populated if not provided, but no idempotency will be enforced this way.
+ sdk_options: dict, optional
+ Dictionary of options that will be passed to the Secrets Manager put_secret_value or create_secret API calls
+
+ Raises
+ ------
+ SetSecretError
+ When attempting to update or create a secret fails.
+
+ Returns
+ -------
+ SetSecretResponse:
+ The dict returned by boto3.
+
+ Example
+ -------
+ **Sets a secret**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>>
+ >>> parameters.set_secret(name="llamas-are-awesome", value="supers3cr3tllam@passw0rd")
+
+ **Sets a secret and includes a client_request_token**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>> import uuid
+ >>>
+ >>> parameters.set_secret(
+ name="my-secret",
+ value='{"password": "supers3cr3tllam@passw0rd"}',
+ client_request_token=str(uuid.uuid4())
+ )
+
+ URLs:
+ -------
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager/client/put_secret_value.html
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager/client/create_secret.html
+ """
+
+ if isinstance(value, dict):
+ value = json.dumps(value, cls=Encoder)
+
+ if isinstance(value, bytes):
+ sdk_options["SecretBinary"] = value
+ else:
+ sdk_options["SecretString"] = value
+
+ if client_request_token:
+ sdk_options["ClientRequestToken"] = client_request_token
+
+ try:
+ logger.debug(f"Attempting to update secret {name}")
+ return self._update_secret(name=name, **sdk_options)
+ except self.client.exceptions.ResourceNotFoundException:
+ logger.debug(f"Secret {name} doesn't exist, creating a new one")
+ return self._create_secret(name=name, **sdk_options)
+ except Exception as exc:
+ raise SetSecretError(f"Error setting secret - {str(exc)}") from exc
+
+
+@overload
+def get_secret(
+ name: str,
+ transform: None = None,
+ force_fetch: bool = False,
+ max_age: Optional[int] = None,
+ **sdk_options,
+) -> str: ...
+
+
+@overload
+def get_secret(
+ name: str,
+ transform: Literal["json"],
+ force_fetch: bool = False,
+ max_age: Optional[int] = None,
+ **sdk_options,
+) -> dict: ...
+
+
+@overload
+def get_secret(
+ name: str,
+ transform: Literal["binary"],
+ force_fetch: bool = False,
+ max_age: Optional[int] = None,
+ **sdk_options,
+) -> bytes: ...
+
+
+@overload
+def get_secret(
+ name: str,
+ transform: Literal["auto"],
+ force_fetch: bool = False,
+ max_age: Optional[int] = None,
+ **sdk_options,
+) -> Union[str, dict, bytes]: ...
+
def get_secret(
name: str,
- transform: Optional[str] = None,
+ transform: TransformOptions = None,
force_fetch: bool = False,
max_age: Optional[int] = None,
**sdk_options,
@@ -182,3 +358,87 @@ def get_secret(
force_fetch=force_fetch,
**sdk_options,
)
+
+
+def set_secret(
+ name: str,
+ value: Union[str, dict, bytes],
+ *, # force keyword arguments
+ client_request_token: Optional[str] = None,
+ **sdk_options,
+) -> SetSecretResponse:
+ """
+ Modify the details of a secret or create a new secret if it doesn't already exist.
+
+ We aim to minimize API calls by assuming that the secret already exists and needs updating.
+ If it doesn't exist, we attempt to create a new one. Refer to the following workflow for a better understanding:
+
+
+ ┌────────────────────────┐ ┌─────────────────┐
+ ┌───────▶│Resource NotFound error?│────▶│Create Secret API│─────┐
+ │ └────────────────────────┘ └─────────────────┘ │
+ │ │
+ │ │
+ │ ▼
+ ┌─────────────────┐ ┌─────────────────────┐
+ │Update Secret API│────────────────────────────────────────────▶│ Return or Exception │
+ └─────────────────┘ └─────────────────────┘
+
+ Parameters
+ ----------
+ name: str
+ The ARN or name of the secret to add a new version to or create a new one.
+ value: str, dict or bytes
+ Specifies text data that you want to encrypt and store in this new version of the secret.
+ client_request_token: str, optional
+ This value helps ensure idempotency. It's recommended that you generate
+ a UUID-type value to ensure uniqueness within the specified secret.
+ This value becomes the VersionId of the new version. This field is
+ auto-populated if not provided, but no idempotency will be enforced this way.
+ sdk_options: dict, optional
+ Dictionary of options that will be passed to the Secrets Manager put_secret_value or create_secret API calls
+
+ Raises
+ ------
+ SetSecretError
+ When attempting to update or create a secret fails.
+
+ Returns
+ -------
+ SetSecretResponse:
+ The dict returned by boto3.
+
+ Example
+ -------
+ **Sets a secret**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>>
+ >>> parameters.set_secret(name="llamas-are-awesome", value="supers3cr3tllam@passw0rd")
+
+ **Sets a secret and includes a client_request_token**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>>
+ >>> parameters.set_secret(
+ name="my-secret",
+ value='{"password": "supers3cr3tllam@passw0rd"}',
+ client_request_token="61f2af5f-5f75-44b1-a29f-0cc37af55b11"
+ )
+
+ URLs:
+ -------
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager/client/put_secret_value.html
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager/client/create_secret.html
+ """
+
+ # Only create the provider if this function is called at least once
+ if "secrets" not in DEFAULT_PROVIDERS:
+ DEFAULT_PROVIDERS["secrets"] = SecretsProvider()
+
+ return DEFAULT_PROVIDERS["secrets"].set(
+ name=name,
+ value=value,
+ client_request_token=client_request_token,
+ **sdk_options,
+ )
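A usage sketch for the new `set_secret`; it assumes AWS credentials with `secretsmanager:CreateSecret` and `secretsmanager:PutSecretValue` permissions, and the secret name is a placeholder:

```python
import uuid

from aws_lambda_powertools.utilities import parameters

# dict values are JSON-serialized before being stored as a SecretString
response = parameters.set_secret(
    name="my-app/database",
    value={"username": "admin", "password": "s3cr3t"},
    client_request_token=str(uuid.uuid4()),  # makes retries idempotent
)

print(response["VersionId"])
```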
diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py
index 1be07e7c9f0..76553bda0fe 100644
--- a/aws_lambda_powertools/utilities/parameters/ssm.py
+++ b/aws_lambda_powertools/utilities/parameters/ssm.py
@@ -4,6 +4,7 @@
from __future__ import annotations
+import logging
import os
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, overload
@@ -17,15 +18,24 @@
slice_dictionary,
)
from aws_lambda_powertools.shared.types import Literal
-
-from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider, transform_value
-from .exceptions import GetParameterError
-from .types import TransformOptions
+from aws_lambda_powertools.utilities.parameters.base import (
+ DEFAULT_MAX_AGE_SECS,
+ DEFAULT_PROVIDERS,
+ BaseProvider,
+ transform_value,
+)
+from aws_lambda_powertools.utilities.parameters.exceptions import GetParameterError, SetParameterError
+from aws_lambda_powertools.utilities.parameters.types import PutParameterResponse, TransformOptions
if TYPE_CHECKING:
from mypy_boto3_ssm import SSMClient
from mypy_boto3_ssm.type_defs import GetParametersResultTypeDef
+SSM_PARAMETER_TYPES = Literal["String", "StringList", "SecureString"]
+SSM_PARAMETER_TIER = Literal["Standard", "Advanced", "Intelligent-Tiering"]
+
+logger = logging.getLogger(__name__)
+
class SSMProvider(BaseProvider):
"""
@@ -169,6 +179,126 @@ def get( # type: ignore[override]
return super().get(name, max_age, transform, force_fetch, **sdk_options)
+ @overload
+ def set(
+ self,
+ name: str,
+ value: list[str],
+ *,
+ overwrite: bool = False,
+ description: str = "",
+ parameter_type: Literal["StringList"] = "StringList",
+ tier: Literal["Standard", "Advanced", "Intelligent-Tiering"] = "Standard",
+ kms_key_id: str | None = None,
+ **sdk_options,
+ ): ...
+
+ @overload
+ def set(
+ self,
+ name: str,
+ value: str,
+ *,
+ overwrite: bool = False,
+ description: str = "",
+ parameter_type: Literal["SecureString"] = "SecureString",
+ tier: Literal["Standard", "Advanced", "Intelligent-Tiering"] = "Standard",
+ kms_key_id: str,
+ **sdk_options,
+ ): ...
+
+ @overload
+ def set(
+ self,
+ name: str,
+ value: str,
+ *,
+ overwrite: bool = False,
+ description: str = "",
+ parameter_type: Literal["String"] = "String",
+ tier: Literal["Standard", "Advanced", "Intelligent-Tiering"] = "Standard",
+ kms_key_id: str | None = None,
+ **sdk_options,
+ ): ...
+
+ def set(
+ self,
+ name: str,
+ value: str | list[str],
+ *,
+ overwrite: bool = False,
+ description: str = "",
+ parameter_type: SSM_PARAMETER_TYPES = "String",
+ tier: SSM_PARAMETER_TIER = "Standard",
+ kms_key_id: str | None = None,
+ **sdk_options,
+ ) -> PutParameterResponse:
+ """
+ Sets a parameter in AWS Systems Manager Parameter Store.
+
+ Parameters
+ ----------
+ name: str
+ The fully qualified name of the parameter, including the complete hierarchy (path and name).
+ value: str
+ The parameter value
+ overwrite: bool, optional
+ If the parameter value should be overwritten, False by default
+ description: str, optional
+ The description of the parameter
+ parameter_type: str, optional
+ Type of the parameter. Allowed values are String, StringList, and SecureString
+ tier: str, optional
+ The parameter tier to use. Allowed values are Standard, Advanced, and Intelligent-Tiering
+ kms_key_id: str, optional
+ The KMS key id to use to encrypt the parameter
+ sdk_options: dict, optional
+ Dictionary of options that will be passed to the Parameter Store put_parameter API call
+
+ Raises
+ ------
+ SetParameterError
+ When the parameter provider fails to set a parameter value for
+ a given name.
+
+ URLs:
+ -------
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm/client/put_parameter.html
+
+ Example
+ -------
+ **Sets a parameter value in Systems Manager Parameter Store**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>>
+ >>> response = parameters.set_parameter(name="/my/example/parameter", value="More Powertools")
+ >>>
+ >>> print(response["Version"])
+ 123
+
+ Returns
+ -------
+ PutParameterResponse
+ The dict returned by boto3.
+ """
+ opts = {
+ "Name": name,
+ "Value": value,
+ "Overwrite": overwrite,
+ "Type": parameter_type,
+ "Tier": tier,
+ "Description": description,
+ **sdk_options,
+ }
+
+ if kms_key_id:
+ opts["KeyId"] = kms_key_id
+
+ try:
+ return self.client.put_parameter(**opts)
+ except Exception as exc:
+ raise SetParameterError(f"Error setting parameter - {str(exc)}") from exc
+
def _get(self, name: str, decrypt: bool = False, **sdk_options) -> str:
"""
Retrieve a parameter value from AWS Systems Manager Parameter Store
@@ -811,6 +941,81 @@ def get_parameters(
)
+def set_parameter(
+ name: str,
+ value: str,
+ *, # force keyword arguments
+ overwrite: bool = False,
+ description: str = "",
+ parameter_type: SSM_PARAMETER_TYPES = "String",
+ tier: SSM_PARAMETER_TIER = "Standard",
+ kms_key_id: str | None = None,
+ **sdk_options,
+) -> PutParameterResponse:
+ """
+ Sets a parameter in AWS Systems Manager Parameter Store.
+
+ Parameters
+ ----------
+ name: str
+ The fully qualified name of the parameter, including the complete hierarchy (path and name).
+ value: str
+ The parameter value
+ overwrite: bool, optional
+ If the parameter value should be overwritten, False by default
+ description: str, optional
+ The description of the parameter
+ parameter_type: str, optional
+ Type of the parameter. Allowed values are String, StringList, and SecureString
+ tier: str, optional
+ The parameter tier to use. Allowed values are Standard, Advanced, and Intelligent-Tiering
+ kms_key_id: str, optional
+ The KMS key id to use to encrypt the parameter
+ sdk_options: dict, optional
+ Dictionary of options that will be passed to the Parameter Store put_parameter API call
+
+ Raises
+ ------
+ SetParameterError
+ When attempting to set a parameter fails.
+
+ URLs:
+ -------
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm/client/put_parameter.html
+
+ Example
+ -------
+ **Sets a parameter value in Systems Manager Parameter Store**
+
+ >>> from aws_lambda_powertools.utilities import parameters
+ >>>
+ >>> response = parameters.set_parameter(name="/my/example/parameter", value="More Powertools")
+ >>>
+ >>> print(response["Version"])
+ 123
+
+ Returns
+ -------
+ PutParameterResponse
+ The dict returned by boto3.
+ """
+
+ # Only create the provider if this function is called at least once
+ if "ssm" not in DEFAULT_PROVIDERS:
+ DEFAULT_PROVIDERS["ssm"] = SSMProvider()
+
+ return DEFAULT_PROVIDERS["ssm"].set(
+ name,
+ value,
+ parameter_type=parameter_type,
+ overwrite=overwrite,
+ tier=tier,
+ description=description,
+ kms_key_id=kms_key_id,
+ **sdk_options,
+ )
+
+
@overload
def get_parameters_by_name(
parameters: Dict[str, Dict],
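The counterpart sketch for `set_parameter`, writing a SecureString with a customer-managed key; the parameter name and KMS alias are placeholders:

```python
from aws_lambda_powertools.utilities import parameters

response = parameters.set_parameter(
    name="/my/app/db-password",
    value="s3cr3t",
    parameter_type="SecureString",
    kms_key_id="alias/my-app-key",  # omit to use the account's default SSM key
    overwrite=True,
)

print(response["Version"])  # version number assigned by Parameter Store
```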
diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py
index faa06cee89e..c087a3764f4 100644
--- a/aws_lambda_powertools/utilities/parameters/types.py
+++ b/aws_lambda_powertools/utilities/parameters/types.py
@@ -1,3 +1,20 @@
-from aws_lambda_powertools.shared.types import Literal
+from typing import Any, Optional
+
+from aws_lambda_powertools.shared.types import Dict, List, Literal, TypedDict
TransformOptions = Literal["json", "binary", "auto", None]
+
+
+class PutParameterResponse(TypedDict):
+ Version: int
+ Tier: str
+ ResponseMetadata: dict
+
+
+class SetSecretResponse(TypedDict):
+ ARN: str
+ Name: str
+ VersionId: str
+ VersionStages: Optional[List[str]]
+ ReplicationStatus: Optional[List[Dict[str, Any]]]
+ ResponseMetadata: dict
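These TypedDicts exist purely for static typing; a small sketch of what they enable:

```python
from aws_lambda_powertools.utilities.parameters.types import PutParameterResponse


def log_new_version(response: PutParameterResponse) -> None:
    # Type checkers now know Version is an int and Tier is a str
    print(f"stored as version {response['Version']} ({response['Tier']} tier)")
```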
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 6f554c8868d..5633fd4605c 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,5 +1,5 @@
# v9.1.18
-FROM squidfunk/mkdocs-material@sha256:3678304a65e17660953a30c0a0be0bc2fb8f55ac450216c14af6ba942badc4dc
+FROM squidfunk/mkdocs-material@sha256:33076657e536b6b8439168296a193098aef3c4c88cc2cecd0736cd391b90e7fd
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
COPY requirements.txt /tmp/
RUN pip install --require-hashes -r /tmp/requirements.txt
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index 97a1bf3c68e..8740c264c46 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -524,11 +524,12 @@ Behind the scenes, the [data validation](#data-validation) feature auto-generate
There are some important **caveats** that you should know before enabling it:
-| Caveat | Description |
+| Caveat | Description |
| ------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| Swagger UI is **publicly accessible by default** | When using `enable_swagger` method, you can [protect sensitive API endpoints by implementing a custom middleware](#customizing-swagger-ui) using your preferred authorization mechanism. |
-| **No micro-functions support** yet | Swagger UI is enabled on a per resolver instance which will limit its accuracy here. |
-| You need to expose a **new route** | You'll need to expose the following path to Lambda: `/swagger`; ignore if you're routing this path already. |
+| Swagger UI is **publicly accessible by default** | When using `enable_swagger` method, you can [protect sensitive API endpoints by implementing a custom middleware](#customizing-swagger-ui) using your preferred authorization mechanism. |
+| **No micro-functions support** yet | Swagger UI is enabled per resolver instance, which limits its accuracy here. |
+| You need to expose a **new route** | You'll need to expose the following path to Lambda: `/swagger`; ignore if you're routing this path already. |
+| JS and CSS files are **embedded within Swagger HTML** | If you are not using an external CDN to serve Swagger UI assets, we embed JS and CSS directly into the HTML. To enhance performance, please consider enabling the `compress` option to minimize the size of HTTP responses. |
```python hl_lines="12-13" title="enabling_swagger.py"
--8<-- "examples/event_handler_rest/src/enabling_swagger.py"
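A sketch of the new caveat's suggestion, treating `compress` as the `enable_swagger` option the row refers to:

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver(enable_validation=True)

# Embedded JS/CSS make the Swagger HTML response large; compressing it
# keeps the response size manageable when no external CDN is used.
app.enable_swagger(path="/swagger", compress=True)


def lambda_handler(event, context):
    return app.resolve(event, context)
```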
diff --git a/docs/core/metrics.md b/docs/core/metrics.md
index 19a34cf21ad..7cb1f0b2527 100644
--- a/docs/core/metrics.md
+++ b/docs/core/metrics.md
@@ -131,6 +131,21 @@ If you'd like to remove them at some point, you can use `clear_default_dimension
--8<-- "examples/metrics/src/set_default_dimensions_log_metrics.py"
```
+### Changing default timestamp
+
+When creating metrics, we use the current timestamp. If you want to change the timestamp of all the metrics you create, utilize the `set_timestamp` function. You can specify a datetime object or an integer representing an epoch timestamp in milliseconds.
+
+Note that when specifying the timestamp as an integer, it must be a Unix epoch time in milliseconds.
+
+???+ info
+ If you need to use different timestamps across multiple metrics, opt for [single_metric](#working-with-different-timestamp).
+
+=== "set_custom_timestamp_log_metrics.py"
+
+ ```python hl_lines="15"
+ --8<-- "examples/metrics/src/set_custom_timestamp_log_metrics.py"
+ ```
+
### Flushing metrics
As you finish adding all your metrics, you need to serialize and flush them to standard output. You can do that automatically with the `log_metrics` decorator.
@@ -224,14 +239,15 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata
--8<-- "examples/metrics/src/add_metadata_output.json"
```
-### Single metric with a different dimension
+### Single metric
-CloudWatch EMF uses the same dimensions across all your metrics. Use `single_metric` if you have a metric that should have different dimensions.
+CloudWatch EMF uses the same dimensions and timestamp across all your metrics. Use `single_metric` if you have a metric that should have different dimensions or a different timestamp.
-???+ info
- Generally, this would be an edge case since you [pay for unique metric](https://aws.amazon.com/cloudwatch/pricing){target="_blank"}. Keep the following formula in mind:
+#### Working with different dimensions
- **unique metric = (metric_name + dimension_name + dimension_value)**
+Generally, using different dimensions would be an edge case since you [pay for unique metric](https://aws.amazon.com/cloudwatch/pricing){target="_blank"}.
+
+Keep the following formula in mind: **unique metric = (metric_name + dimension_name + dimension_value)**
=== "single_metric.py"
@@ -259,6 +275,22 @@ By default it will skip all previously defined dimensions including default dime
--8<-- "examples/metrics/src/single_metric_default_dimensions.py"
```
+#### Working with different timestamp
+
+When working with multiple metrics, you may need different timestamps between them. In such cases, use `single_metric` to flush individual metrics with specific timestamps.
+
+=== "single_metric_with_different_timestamp.py"
+
+ ```python hl_lines="15 17"
+ --8<-- "examples/metrics/src/single_metric_with_different_timestamp.py"
+ ```
+
+=== "single_metric_with_different_timestamp_payload.json"
+
+ ```json hl_lines="5 10 15 20 25"
+ --8<-- "examples/metrics/src/single_metric_with_different_timestamp_payload.json"
+ ```
+
### Flushing metrics manually
If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data.
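A combined sketch of both timestamp paths described above, assuming the metric yielded by `single_metric` exposes the same `set_timestamp`; keep in mind CloudWatch only ingests timestamps from roughly two weeks in the past to two hours in the future:

```python
import datetime
import time

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit, single_metric

metrics = Metrics(namespace="ExampleApp", service="payment")


@metrics.log_metrics
def lambda_handler(event, context):
    # Every metric flushed by this Metrics instance shares one timestamp
    metrics.set_timestamp(datetime.datetime.now() - datetime.timedelta(minutes=5))
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)

    # A one-off metric with its own timestamp, given as epoch milliseconds
    with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ExampleApp") as metric:
        metric.set_timestamp(int(time.time() * 1000) - 120_000)  # two minutes ago
```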
diff --git a/docs/core/tracer.md b/docs/core/tracer.md
index f7163564e6b..f665e6ae280 100644
--- a/docs/core/tracer.md
+++ b/docs/core/tracer.md
@@ -227,7 +227,7 @@ Tracer keeps a copy of its configuration after the first initialization. This is
## Testing your code
-Tracer is disabled by default when not running in the AWS Lambda environment - This means no code changes or environment variables to be set.
+Tracer is disabled by default when not running in the AWS Lambda environment, including when running locally via AWS SAM CLI or Chalice. This means no code changes or environment variables are needed.
## Tips
diff --git a/docs/index.md b/docs/index.md
index 5235e518131..a894d0052fc 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -67,8 +67,8 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
For the latter, make sure to replace `{region}` with your AWS region, e.g., `eu-west-1`.
- * x86 architecture: __arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66__{: .copyMe}:clipboard:
- * ARM architecture: __arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66__{: .copyMe}:clipboard:
+ * x86 architecture: __arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67__{: .copyMe}:clipboard:
+ * ARM architecture: __arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67__{: .copyMe}:clipboard:
???+ note "Code snippets for popular infrastructure as code frameworks"
@@ -81,7 +81,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
Type: AWS::Serverless::Function
Properties:
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
```
=== "Serverless framework"
@@ -91,7 +91,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
hello:
handler: lambda_function.lambda_handler
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
```
=== "CDK"
@@ -107,7 +107,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -156,7 +156,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
@@ -209,7 +209,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
❯ amplify push -y
@@ -220,7 +220,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
? Do you want to edit the local lambda function now? No
```
@@ -234,7 +234,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
Properties:
Architectures: [arm64]
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67
```
=== "Serverless framework"
@@ -245,7 +245,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
handler: lambda_function.lambda_handler
architecture: arm64
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67
```
=== "CDK"
@@ -261,7 +261,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -311,7 +311,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67"]
architectures = ["arm64"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
@@ -367,7 +367,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67
❯ amplify push -y
@@ -378,7 +378,7 @@ You can install Powertools for AWS Lambda (Python) using your favorite dependenc
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67
? Do you want to edit the local lambda function now? No
```
@@ -409,74 +409,74 @@ In this context, `[aws-sdk]` is an alias to the `boto3` package. Due to dependen
| Region | Layer ARN |
| -------------------- | --------------------------------------------------------------------------------------------------------- |
- | **`af-south-1`** | **arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-east-1`** | **arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-1`** | **arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-2`** | **arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-3`** | **arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-south-1`** | **arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-south-2`** | **arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-1`** | **arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-2`** | **arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-3`** | **arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-4`** | **arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ca-central-1`** | **arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`ca-west-1`** | **arn:aws:lambda:ca-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-central-1`** | **arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-central-2`** | **arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-north-1`** | **arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-south-1`** | **arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-south-2`** | **arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-west-1`** | **arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-west-2`** | **arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`eu-west-3`** | **arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`il-central-1`** | **arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`me-central-1`** | **arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`me-south-1`** | **arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`sa-east-1`** | **arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`us-east-1`** | **arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`us-east-2`** | **arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`us-west-1`** | **arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
- | **`us-west-2`** | **arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:66**{: .copyMe}:clipboard: |
+ | **`af-south-1`** | **arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-east-1`** | **arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-1`** | **arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-2`** | **arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-3`** | **arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-south-1`** | **arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-south-2`** | **arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-1`** | **arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-2`** | **arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-3`** | **arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-4`** | **arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ca-central-1`** | **arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`ca-west-1`** | **arn:aws:lambda:ca-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-central-1`** | **arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-central-2`** | **arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-north-1`** | **arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-south-1`** | **arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-south-2`** | **arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-west-1`** | **arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-west-2`** | **arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`eu-west-3`** | **arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`il-central-1`** | **arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`me-central-1`** | **arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`me-south-1`** | **arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`sa-east-1`** | **arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`us-east-1`** | **arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`us-east-2`** | **arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`us-west-1`** | **arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
+ | **`us-west-2`** | **arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:67**{: .copyMe}:clipboard: |
=== "arm64"
| Region | Layer ARN |
| -------------------- | --------------------------------------------------------------------------------------------------------------- |
- | **`af-south-1`** | **arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-east-1`** | **arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-1`** | **arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-2`** | **arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-northeast-3`** | **arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-south-1`** | **arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-south-2`** | **arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-1`** | **arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-2`** | **arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ap-southeast-3`** | **arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`ca-central-1`** | **arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-central-1`** | **arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-central-2`** | **arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-north-1`** | **arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-south-1`** | **arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-south-2`** | **arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-west-1`** | **arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-west-2`** | **arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`eu-west-3`** | **arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`il-central-1`** | **arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`me-central-1`** | **arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`me-south-1`** | **arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`sa-east-1`** | **arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`us-east-1`** | **arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`us-east-2`** | **arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`us-west-1`** | **arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
- | **`us-west-2`** | **arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:66**{: .copyMe}:clipboard: |
+ | **`af-south-1`** | **arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-east-1`** | **arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-1`** | **arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-2`** | **arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-northeast-3`** | **arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-south-1`** | **arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-south-2`** | **arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-1`** | **arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-2`** | **arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ap-southeast-3`** | **arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`ca-central-1`** | **arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-central-1`** | **arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-central-2`** | **arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-north-1`** | **arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-south-1`** | **arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-south-2`** | **arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-west-1`** | **arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-west-2`** | **arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`eu-west-3`** | **arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`il-central-1`** | **arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`me-central-1`** | **arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`me-south-1`** | **arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`sa-east-1`** | **arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`us-east-1`** | **arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`us-east-2`** | **arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`us-west-1`** | **arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
+ | **`us-west-2`** | **arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:67**{: .copyMe}:clipboard: |
**Want to inspect the contents of the Layer?**
The pre-signed URL to download this Lambda Layer will be within the `Location` key in the CLI output. The CLI output also lists the Powertools for AWS Lambda version bundled in the Layer.
```bash title="AWS CLI command to download Lambda Layer content"
-aws lambda get-layer-version-by-arn --arn arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:66 --region eu-west-1
+aws lambda get-layer-version-by-arn --arn arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67 --region eu-west-1
```
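You can also fetch the pre-signed URL programmatically. The sketch below is illustrative only: it uses `boto3` and the Python standard library with the same `eu-west-1` ARN as above; adjust the ARN and region to your setup.

```python title="Downloading the Layer content with boto3 (illustrative sketch)"
import urllib.request

import boto3

client = boto3.client("lambda", region_name="eu-west-1")
layer = client.get_layer_version_by_arn(
    Arn="arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:67",
)

# the pre-signed URL is short-lived, so download right away
urllib.request.urlretrieve(layer["Content"]["Location"], "layer.zip")
```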
#### SAR
diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md
index c5acf22cead..efb2c0cbccc 100644
--- a/docs/tutorial/index.md
+++ b/docs/tutorial/index.md
@@ -727,7 +727,7 @@ Cross-cutting concerns like filtering traces by Cold Start, including response a
We can simplify our previous patterns by using [Powertools for AWS Lambda (Python) Tracer](../core/tracer.md){target="_blank"}; a thin wrapper on top of X-Ray SDK.
???+ note
- You can now safely remove `aws-xray-sdk` from `requirements.txt`; keep `aws-lambda-powertools` only.
+ You can now safely remove `aws-xray-sdk` from `requirements.txt`; keep `aws-lambda-powertools[tracer]` only.
```python title="Refactoring with Powertools for AWS Lambda (Python) Tracer" hl_lines="1 6 11 13 19 21 27"
from aws_lambda_powertools import Logger, Tracer
diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index ada05766ab4..6b8e0fd3000 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -141,8 +141,11 @@ Processing batches from SQS works in three stages:
#### FIFO queues
-When using [SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html){target="_blank" rel="nofollow"}, we will stop processing messages after the first failure, and return all failed and unprocessed messages in `batchItemFailures`.
-This helps preserve the ordering of messages in your queue.
+When working with [SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html){target="_blank"}, a batch may include messages from different group IDs.
+
+By default, we will stop processing at the first failure and mark unprocessed messages as failed to preserve ordering. However, this behavior may not be optimal for customers who wish to proceed with processing messages from a different group ID.
+
+Enable the `skip_group_on_error` option to continue processing messages across group IDs. With this setting, messages from a failed group ID are sent back to SQS, while messages from subsequent group IDs continue to be processed uninterrupted.
=== "Recommended"
@@ -164,6 +167,12 @@ This helps preserve the ordering of messages in your queue.
--8<-- "examples/batch_processing/src/getting_started_sqs_fifo_decorator.py"
```
+=== "Enabling skip_group_on_error flag"
+
+ ```python hl_lines="2-6 9 23"
+ --8<-- "examples/batch_processing/src/getting_started_sqs_fifo_skip_on_error.py"
+ ```
+
### Processing messages from Kinesis
Processing batches from Kinesis works in three stages:
@@ -311,7 +320,7 @@ sequenceDiagram
> Read more about [Batch Failure Reporting feature in AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank"}.
-Sequence diagram to explain how [`SqsFifoPartialProcessor` works](#fifo-queues) with SQS FIFO queues.
+Sequence diagram to explain how [`SqsFifoPartialProcessor` works](#fifo-queues) with SQS FIFO queues without the `skip_group_on_error` flag.
```mermaid
@@ -335,6 +344,31 @@ sequenceDiagram
SQS FIFO mechanism with Batch Item Failures
+Sequence diagram to explain how [`SqsFifoPartialProcessor` works](#fifo-queues) with SQS FIFO queues with the `skip_group_on_error` flag.
+
+
+```mermaid
+sequenceDiagram
+ autonumber
+ participant SQS queue
+ participant Lambda service
+ participant Lambda function
+ Lambda service->>SQS queue: Poll
+ Lambda service->>Lambda function: Invoke (batch event)
+ activate Lambda function
+ Lambda function-->Lambda function: Process 2 out of 10 batch items
+ Lambda function--xLambda function: Fail on 3rd batch item
+ Lambda function-->Lambda function: Process messages from another MessageGroupID
+ Lambda function->>Lambda service: Report 3rd batch item and all messages within the same MessageGroupID as failure
+ deactivate Lambda function
+ activate SQS queue
+ Lambda service->>SQS queue: Delete successful messages processed
+ SQS queue-->>SQS queue: Failed messages return
+ deactivate SQS queue
+```
+SQS FIFO mechanism with Batch Item Failures and `skip_group_on_error` enabled
+
+
#### Kinesis and DynamoDB Streams
> Read more about [Batch Failure Reporting feature](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-batchfailurereporting){target="_blank"}.
@@ -570,12 +604,28 @@ classDiagram
* **`_prepare()`** – called once as part of the processor initialization
* **`_clean()`** – teardown logic called once after `_process_record` completes
* **`_async_process_record()`** – use this method when you need to implement asynchronous logic; otherwise, define it in your class with empty logic
+* **`response()`** – called upon completion of processing
-You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function.
+You can use this class to instantiate a new processor, then pass it to the `process_partial_response` function.
+
+=== "Creating a custom batch processor"
+
+ ```python hl_lines="10-13 21 37 43 46 53 64 69 73"
+ --8<-- "examples/batch_processing/src/custom_partial_processor.py"
+ ```
+
+=== "DynamoDB table used for storing processed records."
+
+ ```yaml
+ --8<-- "examples/batch_processing/sam/custom_partial_processor_dynamodb_table.yaml"
+ ```
+
+=== "Sample event"
+
+ ```json
+ --8<-- "examples/batch_processing/src/custom_partial_processor_payload.json"
+ ```
-```python hl_lines="9-11 19 33 39 46 57 62 66 74" title="Creating a custom batch processor"
---8<-- "examples/batch_processing/src/custom_partial_processor.py"
-```
### Caveats
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 97b7a5dfda2..45c9ccd9869 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -74,39 +74,40 @@ Log Data Event for Troubleshooting
## Supported event sources
-| Event Source | Data_class |
-|---------------------------------------------------------------------------|----------------------------------------------------|
-| [Active MQ](#active-mq) | `ActiveMQEvent` |
-| [API Gateway Authorizer](#api-gateway-authorizer) | `APIGatewayAuthorizerRequestEvent` |
-| [API Gateway Authorizer V2](#api-gateway-authorizer-v2) | `APIGatewayAuthorizerEventV2` |
-| [API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent` |
-| [API Gateway Proxy V2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` |
-| [Application Load Balancer](#application-load-balancer) | `ALBEvent` |
-| [AppSync Authorizer](#appsync-authorizer) | `AppSyncAuthorizerEvent` |
-| [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` |
-| [AWS Config Rule](#aws-config-rule) | `AWSConfigRuleEvent` |
-| [Bedrock Agent](#bedrock-agent) | `BedrockAgent` |
-| [CloudWatch Dashboard Custom Widget](#cloudwatch-dashboard-custom-widget) | `CloudWatchDashboardCustomWidgetEvent` |
-| [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` |
-| [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` |
-| [Cognito User Pool](#cognito-user-pool) | Multiple available under `cognito_user_pool_event` |
-| [Connect Contact Flow](#connect-contact-flow) | `ConnectContactFlowEvent` |
-| [DynamoDB streams](#dynamodb-streams) | `DynamoDBStreamEvent`, `DynamoDBRecordEventName` |
-| [EventBridge](#eventbridge) | `EventBridgeEvent` |
-| [Kafka](#kafka) | `KafkaEvent` |
-| [Kinesis Data Stream](#kinesis-streams) | `KinesisStreamEvent` |
-| [Kinesis Firehose Delivery Stream](#kinesis-firehose-delivery-stream) | `KinesisFirehoseEvent` |
-| [Lambda Function URL](#lambda-function-url) | `LambdaFunctionUrlEvent` |
-| [Rabbit MQ](#rabbit-mq) | `RabbitMQEvent` |
-| [S3](#s3) | `S3Event` |
-| [S3 Batch Operations](#s3-batch-operations) | `S3BatchOperationEvent` |
-| [S3 Object Lambda](#s3-object-lambda) | `S3ObjectLambdaEvent` |
-| [S3 EventBridge Notification](#s3-eventbridge-notification) | `S3EventBridgeNotificationEvent` |
-| [SES](#ses) | `SESEvent` |
-| [SNS](#sns) | `SNSEvent` |
-| [SQS](#sqs) | `SQSEvent` |
-| [VPC Lattice V2](#vpc-lattice-v2) | `VPCLatticeV2Event` |
-| [VPC Lattice V1](#vpc-lattice-v1) | `VPCLatticeEvent` |
+| Event Source | Data_class |
+|-------------------------------------------------------------------------------|----------------------------------------------------|
+| [Active MQ](#active-mq) | `ActiveMQEvent` |
+| [API Gateway Authorizer](#api-gateway-authorizer) | `APIGatewayAuthorizerRequestEvent` |
+| [API Gateway Authorizer V2](#api-gateway-authorizer-v2) | `APIGatewayAuthorizerEventV2` |
+| [API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent` |
+| [API Gateway Proxy V2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` |
+| [Application Load Balancer](#application-load-balancer) | `ALBEvent` |
+| [AppSync Authorizer](#appsync-authorizer) | `AppSyncAuthorizerEvent` |
+| [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` |
+| [AWS Config Rule](#aws-config-rule) | `AWSConfigRuleEvent` |
+| [Bedrock Agent](#bedrock-agent) | `BedrockAgent` |
+| [CloudWatch Alarm State Change Action](#cloudwatch-alarm-state-change-action) | `CloudWatchAlarmEvent` |
+| [CloudWatch Dashboard Custom Widget](#cloudwatch-dashboard-custom-widget) | `CloudWatchDashboardCustomWidgetEvent` |
+| [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` |
+| [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` |
+| [Cognito User Pool](#cognito-user-pool) | Multiple available under `cognito_user_pool_event` |
+| [Connect Contact Flow](#connect-contact-flow) | `ConnectContactFlowEvent` |
+| [DynamoDB streams](#dynamodb-streams) | `DynamoDBStreamEvent`, `DynamoDBRecordEventName` |
+| [EventBridge](#eventbridge) | `EventBridgeEvent` |
+| [Kafka](#kafka) | `KafkaEvent` |
+| [Kinesis Data Stream](#kinesis-streams) | `KinesisStreamEvent` |
+| [Kinesis Firehose Delivery Stream](#kinesis-firehose-delivery-stream) | `KinesisFirehoseEvent` |
+| [Lambda Function URL](#lambda-function-url) | `LambdaFunctionUrlEvent` |
+| [Rabbit MQ](#rabbit-mq) | `RabbitMQEvent` |
+| [S3](#s3) | `S3Event` |
+| [S3 Batch Operations](#s3-batch-operations) | `S3BatchOperationEvent` |
+| [S3 Object Lambda](#s3-object-lambda) | `S3ObjectLambdaEvent` |
+| [S3 EventBridge Notification](#s3-eventbridge-notification) | `S3EventBridgeNotificationEvent` |
+| [SES](#ses) | `SESEvent` |
+| [SNS](#sns) | `SNSEvent` |
+| [SQS](#sqs) | `SQSEvent` |
+| [VPC Lattice V2](#vpc-lattice-v2) | `VPCLatticeV2Event` |
+| [VPC Lattice V1](#vpc-lattice-v1) | `VPCLatticeEvent` |
???+ info
The examples provided below are far from exhaustive - the data classes themselves are designed to provide a form of
@@ -528,6 +529,17 @@ In this example, we also use the new Logger `correlation_id` and built-in `corre
return { "markdown": f"# {echo}" }
```
+### CloudWatch Alarm State Change Action
+
+[CloudWatch supports Lambda as an alarm state change action](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#alarms-and-actions){target="_blank"}.
+You can use the `CloudWatchAlarmEvent` data class to access fields such as alarm information, current state, and previous state.
+
+=== "app.py"
+
+ ```python hl_lines="2 8"
+ --8<-- "examples/event_sources/src/cloudwatch_alarm_event.py"
+ ```
+
### CloudWatch Logs
CloudWatch Logs events by default are compressed and base64 encoded. You can use the helper function provided to decode,
diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md
index d2d80230c77..92c0c53ce86 100644
--- a/docs/utilities/parameters.md
+++ b/docs/utilities/parameters.md
@@ -27,16 +27,18 @@ This utility requires additional permissions to work as expected.
???+ note
Different parameter providers require different permissions.
-| Provider | Function/Method | IAM Permission |
-| --------- | ---------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
-| SSM | **`get_parameter`**, **`SSMProvider.get`** | **`ssm:GetParameter`** |
-| SSM | **`get_parameters`**, **`SSMProvider.get_multiple`** | **`ssm:GetParametersByPath`** |
-| SSM | **`get_parameters_by_name`**, **`SSMProvider.get_parameters_by_name`** | **`ssm:GetParameter`** and **`ssm:GetParameters`** |
-| SSM | If using **`decrypt=True`** | You must add an additional permission **`kms:Decrypt`** |
-| Secrets | **`get_secret`**, **`SecretsProvider.get`** | **`secretsmanager:GetSecretValue`** |
-| DynamoDB | **`DynamoDBProvider.get`** | **`dynamodb:GetItem`** |
-| DynamoDB | **`DynamoDBProvider.get_multiple`** | **`dynamodb:Query`** |
-| AppConfig | **`get_app_config`**, **`AppConfigProvider.get_app_config`** | **`appconfig:GetLatestConfiguration`** and **`appconfig:StartConfigurationSession`** |
+| Provider | Function/Method | IAM Permission |
+| --------- | ---------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- |
+| SSM | **`get_parameter`**, **`SSMProvider.get`** | **`ssm:GetParameter`** |
+| SSM | **`get_parameters`**, **`SSMProvider.get_multiple`** | **`ssm:GetParametersByPath`** |
+| SSM | **`get_parameters_by_name`**, **`SSMProvider.get_parameters_by_name`** | **`ssm:GetParameter`** and **`ssm:GetParameters`** |
+| SSM | **`set_parameter`**, **`SSMProvider.set_parameter`** | **`ssm:PutParameter`** |
+| SSM | If using **`decrypt=True`** | You must add an additional permission **`kms:Decrypt`** |
+| Secrets | **`get_secret`**, **`SecretsProvider.get`** | **`secretsmanager:GetSecretValue`** |
+| Secrets | **`set_secret`**, **`SecretsProvider.set`** | **`secretsmanager:PutSecretValue`** and **`secretsmanager:CreateSecret`** (if creating secrets) |
+| DynamoDB | **`DynamoDBProvider.get`** | **`dynamodb:GetItem`** |
+| DynamoDB | **`DynamoDBProvider.get_multiple`** | **`dynamodb:Query`** |
+| AppConfig | **`get_app_config`**, **`AppConfigProvider.get_app_config`** | **`appconfig:GetLatestConfiguration`** and **`appconfig:StartConfigurationSession`** |
### Fetching parameters
@@ -84,6 +86,22 @@ For multiple parameters, you can use either:
--8<-- "examples/parameters/src/get_parameter_by_name_error_handling.py"
```
+### Setting parameters
+
+You can set a parameter using the `set_parameter` high-level function. This will create a new parameter if it doesn't exist.
+
+=== "getting_started_set_single_ssm_parameter.py"
+ ```python hl_lines="8"
+ --8<-- "examples/parameters/src/getting_started_set_single_ssm_parameter.py"
+ ```
+
+=== "getting_started_set_ssm_parameter_overwrite.py"
+    Sometimes you may need to update a parameter you set earlier. Use the `overwrite` option to overwrite an existing value. If you do not set this option and the parameter already exists, the value will not be overwritten and an exception will be raised.
+
+ ```python hl_lines="8 12"
+ --8<-- "examples/parameters/src/getting_started_set_ssm_parameter_overwrite.py"
+ ```
+
### Fetching secrets
You can fetch secrets stored in Secrets Manager using `get_secret`.
@@ -93,6 +111,18 @@ You can fetch secrets stored in Secrets Manager using `get_secret`.
--8<-- "examples/parameters/src/getting_started_secret.py"
```
+### Setting secrets
+
+You can set secrets stored in Secrets Manager using `set_secret`.
+
+???+ note
+    To minimize API calls, we first attempt to update an existing secret; if the secret doesn't exist, we create a new one.
+
+=== "getting_started_secret.py"
+ ```python hl_lines="4 25"
+ --8<-- "examples/parameters/src/getting_started_setting_secret.py"
+ ```
+
### Fetching app configurations
You can fetch application configurations in AWS AppConfig using `get_app_config`.
diff --git a/examples/batch_processing/sam/custom_partial_processor_dynamodb_table.yaml b/examples/batch_processing/sam/custom_partial_processor_dynamodb_table.yaml
new file mode 100644
index 00000000000..ac8d9253ed2
--- /dev/null
+++ b/examples/batch_processing/sam/custom_partial_processor_dynamodb_table.yaml
@@ -0,0 +1,15 @@
+Transform: AWS::Serverless-2016-10-31
+Resources:
+ IdempotencyTable:
+ Type: AWS::DynamoDB::Table
+ Properties:
+ AttributeDefinitions:
+ - AttributeName: messageId
+ AttributeType: S
+ KeySchema:
+ - AttributeName: messageId
+ KeyType: HASH
+ TimeToLiveSpecification:
+ AttributeName: expiration
+ Enabled: true
+ BillingMode: PAY_PER_REQUEST
diff --git a/examples/batch_processing/src/custom_partial_processor.py b/examples/batch_processing/src/custom_partial_processor.py
index f4aaa5733b5..aa8e319b21d 100644
--- a/examples/batch_processing/src/custom_partial_processor.py
+++ b/examples/batch_processing/src/custom_partial_processor.py
@@ -1,3 +1,4 @@
+import copy
import os
import sys
from random import randint
@@ -10,13 +11,15 @@
BasePartialProcessor,
process_partial_response,
)
+from aws_lambda_powertools.utilities.batch.types import PartialItemFailureResponse
-table_name = os.getenv("TABLE_NAME", "table_not_found")
+table_name = os.getenv("TABLE_NAME", "table_store_batch")
logger = Logger()
class MyPartialProcessor(BasePartialProcessor):
+ DEFAULT_RESPONSE: PartialItemFailureResponse = {"batchItemFailures": []}
"""
Process a record and store successful results in an Amazon DynamoDB table
@@ -28,6 +31,7 @@ class MyPartialProcessor(BasePartialProcessor):
def __init__(self, table_name: str):
self.table_name = table_name
+ self.batch_response: PartialItemFailureResponse = copy.deepcopy(self.DEFAULT_RESPONSE)
super().__init__()
def _prepare(self):
@@ -36,6 +40,9 @@ def _prepare(self):
self.ddb_table = boto3.resource("dynamodb").Table(self.table_name)
self.success_messages.clear()
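+    # Called upon completion of processing; returns the accumulated batch response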
+ def response(self) -> PartialItemFailureResponse:
+ return self.batch_response
+
def _clean(self):
# It's called once, *after* closing processing all records (closing the context manager)
# Here we're sending, at once, all successful messages to a ddb table
diff --git a/examples/batch_processing/src/custom_partial_processor_payload.json b/examples/batch_processing/src/custom_partial_processor_payload.json
new file mode 100644
index 00000000000..421305a8c3d
--- /dev/null
+++ b/examples/batch_processing/src/custom_partial_processor_payload.json
@@ -0,0 +1,14 @@
+{
+ "Records": [
+ {
+ "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
+ "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a",
+ "body": "{\"Message\": \"success\"}"
+ },
+ {
+ "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a",
+ "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a",
+ "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg=="
+ }
+ ]
+ }
diff --git a/examples/batch_processing/src/getting_started_sqs_fifo_skip_on_error.py b/examples/batch_processing/src/getting_started_sqs_fifo_skip_on_error.py
new file mode 100644
index 00000000000..83015483d1f
--- /dev/null
+++ b/examples/batch_processing/src/getting_started_sqs_fifo_skip_on_error.py
@@ -0,0 +1,23 @@
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.utilities.batch import (
+ SqsFifoPartialProcessor,
+ process_partial_response,
+)
+from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
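+# continue processing messages from other group IDs when a group fails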
+processor = SqsFifoPartialProcessor(skip_group_on_error=True)
+tracer = Tracer()
+logger = Logger()
+
+
+@tracer.capture_method
+def record_handler(record: SQSRecord):
+    payload: str = record.json_body  # use json_body for JSON payloads, or record.body for plain text
+ logger.info(payload)
+
+
+@logger.inject_lambda_context
+@tracer.capture_lambda_handler
+def lambda_handler(event, context: LambdaContext):
+ return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context)
diff --git a/examples/event_handler_bedrock_agents/src/customizing_bedrock_api_operations.py b/examples/event_handler_bedrock_agents/src/customizing_bedrock_api_operations.py
index 6eb2393b263..5cd4c9d95c4 100644
--- a/examples/event_handler_bedrock_agents/src/customizing_bedrock_api_operations.py
+++ b/examples/event_handler_bedrock_agents/src/customizing_bedrock_api_operations.py
@@ -2,7 +2,7 @@
from typing_extensions import Annotated
from aws_lambda_powertools.event_handler import BedrockAgentResolver
-from aws_lambda_powertools.event_handler.openapi.params import Body, Query
+from aws_lambda_powertools.event_handler.openapi.params import Body, Path
from aws_lambda_powertools.utilities.typing import LambdaContext
app = BedrockAgentResolver()
@@ -22,7 +22,7 @@
tags=["todos"],
)
def get_todo_title(
- todo_id: Annotated[int, Query(description="The ID of the TODO item to get the title from")],
+ todo_id: Annotated[int, Path(description="The ID of the TODO item from which to retrieve the title")],
) -> Annotated[str, Body(description="The TODO title")]:
todo = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}")
todo.raise_for_status()
diff --git a/examples/event_sources/src/cloudwatch_alarm_event.py b/examples/event_sources/src/cloudwatch_alarm_event.py
new file mode 100644
index 00000000000..503c25ef0b0
--- /dev/null
+++ b/examples/event_sources/src/cloudwatch_alarm_event.py
@@ -0,0 +1,19 @@
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities.data_classes import CloudWatchAlarmEvent, event_source
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+
+@event_source(data_class=CloudWatchAlarmEvent)
+def lambda_handler(event: CloudWatchAlarmEvent, context: LambdaContext) -> dict:
+ logger.info(f"Alarm {event.alarm_data.alarm_name} state is {event.alarm_data.state.value}")
+
+    # You can now work with the event. For example, you can enrich the received data
+    # and decide how you want to route the alarm.
+
+ return {
+ "name": event.alarm_data.alarm_name,
+ "arn": event.alarm_arn,
+ "urgent": "Priority: P1" in (event.alarm_data.configuration.description or ""),
+ }
diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml
index 6917db8fda8..511b6cd47b1 100644
--- a/examples/logger/sam/template.yaml
+++ b/examples/logger/sam/template.yaml
@@ -14,7 +14,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
Resources:
LoggerLambdaHandlerExample:
diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml
index f8b6107f405..355fb7ea9f1 100644
--- a/examples/metrics/sam/template.yaml
+++ b/examples/metrics/sam/template.yaml
@@ -15,7 +15,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
Resources:
CaptureLambdaHandlerExample:
diff --git a/examples/metrics/src/set_custom_timestamp_log_metrics.py b/examples/metrics/src/set_custom_timestamp_log_metrics.py
new file mode 100644
index 00000000000..4a6cda23ed3
--- /dev/null
+++ b/examples/metrics/src/set_custom_timestamp_log_metrics.py
@@ -0,0 +1,15 @@
+import datetime
+
+from aws_lambda_powertools import Metrics
+from aws_lambda_powertools.metrics import MetricUnit
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = Metrics()
+
+
+@metrics.log_metrics # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+ metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+
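+    # Compute an epoch-millisecond timestamp two days in the past; metrics flushed
+    # by log_metrics will be recorded against this custom timestamp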
+ metric_timestamp = int((datetime.datetime.now() - datetime.timedelta(days=2)).timestamp() * 1000)
+ metrics.set_timestamp(metric_timestamp)
diff --git a/examples/metrics/src/single_metric_with_different_timestamp.py b/examples/metrics/src/single_metric_with_different_timestamp.py
new file mode 100644
index 00000000000..bd99041c007
--- /dev/null
+++ b/examples/metrics/src/single_metric_with_different_timestamp.py
@@ -0,0 +1,18 @@
+from aws_lambda_powertools import Logger, single_metric
+from aws_lambda_powertools.metrics import MetricUnit
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger()
+
+
+def lambda_handler(event: list, context: LambdaContext):
+
+ for record in event:
+
+ record_id: str = record.get("record_id")
+ amount: int = record.get("amount")
+ timestamp: int = record.get("timestamp")
+
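+        # Emit one metric per record, stamped with that record's own timestamp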
+ with single_metric(name="Orders", unit=MetricUnit.Count, value=amount, namespace="Powertools") as metric:
+ logger.info(f"Processing record id {record_id}")
+ metric.set_timestamp(timestamp)
diff --git a/examples/metrics/src/single_metric_with_different_timestamp_payload.json b/examples/metrics/src/single_metric_with_different_timestamp_payload.json
new file mode 100644
index 00000000000..4cd85c6a760
--- /dev/null
+++ b/examples/metrics/src/single_metric_with_different_timestamp_payload.json
@@ -0,0 +1,27 @@
+[
+ {
+ "record_id": "6ba7b810-9dad-11d1-80b4-00c04fd430c8",
+ "amount": 10,
+ "timestamp": 1648195200000
+ },
+ {
+ "record_id": "6ba7b811-9dad-11d1-80b4-00c04fd430c8",
+ "amount": 30,
+ "timestamp": 1648224000000
+ },
+ {
+ "record_id": "6ba7b812-9dad-11d1-80b4-00c04fd430c8",
+ "amount": 25,
+ "timestamp": 1648209600000
+ },
+ {
+ "record_id": "6ba7b813-9dad-11d1-80b4-00c04fd430c8",
+ "amount": 40,
+ "timestamp": 1648177200000
+ },
+ {
+ "record_id": "6ba7b814-9dad-11d1-80b4-00c04fd430c8",
+ "amount": 32,
+ "timestamp": 1648216800000
+ }
+]
diff --git a/examples/parameters/src/getting_started_set_single_ssm_parameter.py b/examples/parameters/src/getting_started_set_single_ssm_parameter.py
new file mode 100644
index 00000000000..4718d99105f
--- /dev/null
+++ b/examples/parameters/src/getting_started_set_single_ssm_parameter.py
@@ -0,0 +1,12 @@
+from aws_lambda_powertools.utilities import parameters
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ try:
+ # Set a single parameter, returns the version ID of the parameter
+ parameter_version = parameters.set_parameter(name="/mySuper/Parameter", value="PowerToolsIsAwesome")
+
+ return {"mySuperParameterVersion": parameter_version, "statusCode": 200}
+ except parameters.exceptions.SetParameterError as error:
+ return {"comments": None, "message": str(error), "statusCode": 400}
diff --git a/examples/parameters/src/getting_started_set_ssm_parameter_overwrite.py b/examples/parameters/src/getting_started_set_ssm_parameter_overwrite.py
new file mode 100644
index 00000000000..a80cf2d9818
--- /dev/null
+++ b/examples/parameters/src/getting_started_set_ssm_parameter_overwrite.py
@@ -0,0 +1,17 @@
+from aws_lambda_powertools.utilities import parameters
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+ try:
+ # Set a single parameter, but overwrite if it already exists.
+ # Overwrite is False by default, so we explicitly set it to True
+ updating_parameter = parameters.set_parameter(
+ name="/mySuper/Parameter",
+ value="PowerToolsIsAwesome",
+ overwrite=True,
+ )
+
+ return {"mySuperParameterVersion": updating_parameter, "statusCode": 200}
+ except parameters.exceptions.SetParameterError as error:
+ return {"comments": None, "message": str(error), "statusCode": 400}
diff --git a/examples/parameters/src/getting_started_setting_secret.py b/examples/parameters/src/getting_started_setting_secret.py
new file mode 100644
index 00000000000..50412380fdf
--- /dev/null
+++ b/examples/parameters/src/getting_started_setting_secret.py
@@ -0,0 +1,30 @@
+from typing import Any
+
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.utilities import parameters
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = Logger(serialize_stacktrace=True)
+
+
+def access_token(client_id: str, client_secret: str, audience: str) -> str:
+ # example function that returns a JWT Access Token
+ # add your own logic here
+ return f"{client_id}.{client_secret}.{audience}"
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+ try:
+ client_id: Any = parameters.get_parameter("/aws-powertools/client_id")
+ client_secret: Any = parameters.get_parameter("/aws-powertools/client_secret")
+ audience: Any = parameters.get_parameter("/aws-powertools/audience")
+
+ jwt_token = access_token(client_id=client_id, client_secret=client_secret, audience=audience)
+
+        # set_secret will create a new secret if it doesn't exist and return the version id
+ update_secret_version_id = parameters.set_secret(name="/aws-powertools/jwt_token", value=jwt_token)
+
+ return {"access_token": "updated", "statusCode": 200, "update_secret_version_id": update_secret_version_id}
+ except parameters.exceptions.SetSecretError as error:
+ logger.exception(error)
+ return {"access_token": "updated", "statusCode": 400}
diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml
index f624eb636b0..f85babd449b 100644
--- a/examples/tracer/sam/template.yaml
+++ b/examples/tracer/sam/template.yaml
@@ -13,7 +13,7 @@ Globals:
Layers:
# Find the latest Layer version in the official documentation
# https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:66
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67
Resources:
CaptureLambdaHandlerExample:
diff --git a/layer/scripts/layer-balancer/go.mod b/layer/scripts/layer-balancer/go.mod
index acf5e6bbb5d..8b226402756 100644
--- a/layer/scripts/layer-balancer/go.mod
+++ b/layer/scripts/layer-balancer/go.mod
@@ -3,25 +3,25 @@ module layerbalancer
go 1.18
require (
- github.com/aws/aws-sdk-go-v2 v1.25.3
- github.com/aws/aws-sdk-go-v2/config v1.27.7
- github.com/aws/aws-sdk-go-v2/service/lambda v1.53.2
+ github.com/aws/aws-sdk-go-v2 v1.26.0
+ github.com/aws/aws-sdk-go-v2/config v1.27.9
+ github.com/aws/aws-sdk-go-v2/service/lambda v1.53.3
golang.org/x/exp v0.0.0-20230321023759-10a507213a29
golang.org/x/sync v0.6.0
)
require (
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect
- github.com/aws/aws-sdk-go-v2/credentials v1.17.7 // indirect
- github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 // indirect
- github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 // indirect
- github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.17.9 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.0 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect
- github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5 // indirect
- github.com/aws/aws-sdk-go-v2/service/sso v1.20.2 // indirect
- github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 // indirect
- github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.20.3 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.3 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.28.5 // indirect
github.com/aws/smithy-go v1.20.1 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
)
diff --git a/layer/scripts/layer-balancer/go.sum b/layer/scripts/layer-balancer/go.sum
index 1a40db5878c..edf71430f73 100644
--- a/layer/scripts/layer-balancer/go.sum
+++ b/layer/scripts/layer-balancer/go.sum
@@ -1,31 +1,31 @@
-github.com/aws/aws-sdk-go-v2 v1.25.3 h1:xYiLpZTQs1mzvz5PaI6uR0Wh57ippuEthxS4iK5v0n0=
-github.com/aws/aws-sdk-go-v2 v1.25.3/go.mod h1:35hUlJVYd+M++iLI3ALmVwMOyRYMmRqUXpTtRGW+K9I=
+github.com/aws/aws-sdk-go-v2 v1.26.0 h1:/Ce4OCiM3EkpW7Y+xUnfAFpchU78K7/Ug01sZni9PgA=
+github.com/aws/aws-sdk-go-v2 v1.26.0/go.mod h1:35hUlJVYd+M++iLI3ALmVwMOyRYMmRqUXpTtRGW+K9I=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo=
-github.com/aws/aws-sdk-go-v2/config v1.27.7 h1:JSfb5nOQF01iOgxFI5OIKWwDiEXWTyTgg1Mm1mHi0A4=
-github.com/aws/aws-sdk-go-v2/config v1.27.7/go.mod h1:PH0/cNpoMO+B04qET699o5W92Ca79fVtbUnvMIZro4I=
-github.com/aws/aws-sdk-go-v2/credentials v1.17.7 h1:WJd+ubWKoBeRh7A5iNMnxEOs982SyVKOJD+K8HIezu4=
-github.com/aws/aws-sdk-go-v2/credentials v1.17.7/go.mod h1:UQi7LMR0Vhvs+44w5ec8Q+VS+cd10cjwgHwiVkE0YGU=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 h1:p+y7FvkK2dxS+FEwRIDHDe//ZX+jDhP8HHE50ppj4iI=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3/go.mod h1:/fYB+FZbDlwlAiynK9KDXlzZl3ANI9JkD0Uhz5FjNT4=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 h1:ifbIbHZyGl1alsAhPIYsHOg5MuApgqOvVeI8wIugXfs=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3/go.mod h1:oQZXg3c6SNeY6OZrDY+xHcF4VGIEoNotX2B4PrDeoJI=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 h1:Qvodo9gHG9F3E8SfYOspPeBt0bjSbsevK8WhRAUHcoY=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3/go.mod h1:vCKrdLXtybdf/uQd/YfVR2r5pcbNuEYKzMQpcxmeSJw=
+github.com/aws/aws-sdk-go-v2/config v1.27.9 h1:gRx/NwpNEFSk+yQlgmk1bmxxvQ5TyJ76CWXs9XScTqg=
+github.com/aws/aws-sdk-go-v2/config v1.27.9/go.mod h1:dK1FQfpwpql83kbD873E9vz4FyAxuJtR22wzoXn3qq0=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.9 h1:N8s0/7yW+h8qR8WaRlPQeJ6czVMNQVNtNdUqf6cItao=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.9/go.mod h1:446YhIdmSV0Jf/SLafGZalQo+xr2iw7/fzXGDPTU1yQ=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.0 h1:af5YzcLf80tv4Em4jWVD75lpnOHSBkPUZxZfGkrI3HI=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.0/go.mod h1:nQ3how7DMnFMWiU1SpECohgC82fpn4cKZ875NDMmwtA=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4 h1:0ScVK/4qZ8CIW0k8jOeFVsyS/sAiXpYxRBLolMkuLQM=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.4/go.mod h1:84KyjNZdHC6QZW08nfHI6yZgPd+qRgaWcYsyLUo3QY8=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4 h1:sHmMWWX5E7guWEFQ9SVo6A3S4xpPrWnd77a6y4WM6PU=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.4/go.mod h1:WjpDrhWisWOIoS9n3nk67A3Ll1vfULJ9Kq6h29HTD48=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5 h1:K/NXvIftOlX+oGgWGIa3jDyYLDNsdVhsjHmsBH2GLAQ=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5/go.mod h1:cl9HGLV66EnCmMNzq4sYOti+/xo8w34CsgzVtm2GgsY=
-github.com/aws/aws-sdk-go-v2/service/lambda v1.53.2 h1:lkPeNqnIPFKWEhHbdT1oinjmhTjb9ZU01tFfXgi4UAM=
-github.com/aws/aws-sdk-go-v2/service/lambda v1.53.2/go.mod h1:BvYv8HrEOHY7GQTDA3abDNj2sn/vtOZZJ9QuxZ+BSBI=
-github.com/aws/aws-sdk-go-v2/service/sso v1.20.2 h1:XOPfar83RIRPEzfihnp+U6udOveKZJvPQ76SKWrLRHc=
-github.com/aws/aws-sdk-go-v2/service/sso v1.20.2/go.mod h1:Vv9Xyk1KMHXrR3vNQe8W5LMFdTjSeWk0gBZBzvf3Qa0=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 h1:pi0Skl6mNl2w8qWZXcdOyg197Zsf4G97U7Sso9JXGZE=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2/go.mod h1:JYzLoEVeLXk+L4tn1+rrkfhkxl6mLDEVaDSvGq9og90=
-github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 h1:Ppup1nVNAOWbBOrcoOxaxPeEnSFB2RnnQdguhXpmeQk=
-github.com/aws/aws-sdk-go-v2/service/sts v1.28.4/go.mod h1:+K1rNPVyGxkRuv9NNiaZ4YhBFuyw2MMA9SlIJ1Zlpz8=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6 h1:b+E7zIUHMmcB4Dckjpkapoy47W6C9QBv/zoUP+Hn8Kc=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.6/go.mod h1:S2fNV0rxrP78NhPbCZeQgY8H9jdDMeGtwcfZIRxzBqU=
+github.com/aws/aws-sdk-go-v2/service/lambda v1.53.3 h1:KsKBuL+bIKhY7SMk+MXSBAj8PLHsTqlU2d0px98azyI=
+github.com/aws/aws-sdk-go-v2/service/lambda v1.53.3/go.mod h1:trTURvQC8AJ41JYhFpVrZKY5tfzGgVUcSijVgfmgl8w=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.3 h1:mnbuWHOcM70/OFUlZZ5rcdfA8PflGXXiefU/O+1S3+8=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.3/go.mod h1:5HFu51Elk+4oRBZVxmHrSds5jFXmFj8C3w7DVF2gnrs=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.3 h1:uLq0BKatTmDzWa/Nu4WO0M1AaQDaPpwTKAeByEc6WFM=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.3/go.mod h1:b+qdhjnxj8GSR6t5YfphOffeoQSQ1KmpoVVuBn+PWxs=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.5 h1:J/PpTf/hllOjx8Xu9DMflff3FajfLxqM5+tepvVXmxg=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.5/go.mod h1:0ih0Z83YDH/QeQ6Ori2yGE2XvWYv/Xm+cZc01LC6oK0=
github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw=
github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
diff --git a/package-lock.json b/package-lock.json
index a26c0c84d30..6b5efa81825 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -11,13 +11,13 @@
"package-lock.json": "^1.0.0"
},
"devDependencies": {
- "aws-cdk": "^2.131.0"
+ "aws-cdk": "^2.133.0"
}
},
"node_modules/aws-cdk": {
- "version": "2.131.0",
- "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.131.0.tgz",
- "integrity": "sha512-ji+MwGFGC88HE/EqV6/VARBp5mu3nXIDa/GYwtGycJqu6WqXhNZXWeDH0JsWaY6+BSUdpY6pr6KWpV+MDyVkDg==",
+ "version": "2.133.0",
+ "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.133.0.tgz",
+ "integrity": "sha512-EwH8VgQQ8ODeMwjE3p+WhbcbWNkCbvuJJl+Py9IB5znGf7GwLcEmOu4YWBsBGPVu41SXbSAf36twMBrJytCFZA==",
"dev": true,
"bin": {
"cdk": "bin/cdk"
diff --git a/package.json b/package.json
index bc0fc07879e..6521bc16769 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "aws-lambda-powertools-python-e2e",
"version": "1.0.0",
"devDependencies": {
- "aws-cdk": "^2.131.0"
+ "aws-cdk": "^2.133.0"
},
"dependencies": {
"package-lock.json": "^1.0.0"
diff --git a/poetry.lock b/poetry.lock
index 40a43b49764..5b2d55ddf74 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "anyio"
@@ -158,17 +158,17 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-aws-lambda-python-alpha"
-version = "2.131.0a0"
+version = "2.133.0a0"
description = "The CDK Construct Library for AWS Lambda in Python"
optional = false
python-versions = "~=3.8"
files = [
- {file = "aws-cdk.aws-lambda-python-alpha-2.131.0a0.tar.gz", hash = "sha256:ab93373302059a147bc8dc393d293035aae654d1d4bdbfd3aa4a12481abd8cce"},
- {file = "aws_cdk.aws_lambda_python_alpha-2.131.0a0-py3-none-any.whl", hash = "sha256:3725708b5099053e3c4814a80cf9d073f4ccf43d683e0bc53e895f61df7cc481"},
+ {file = "aws-cdk.aws-lambda-python-alpha-2.133.0a0.tar.gz", hash = "sha256:1fb72aab5ce0183a0f94f30973e5c4d3c3cb7d191f334165b99ae4e1a7a09aa0"},
+ {file = "aws_cdk.aws_lambda_python_alpha-2.133.0a0-py3-none-any.whl", hash = "sha256:34e843875b4ce1f794b059545f0db3780011f04a1d614eae04f5507a8b4479a7"},
]
[package.dependencies]
-aws-cdk-lib = ">=2.131.0,<3.0.0"
+aws-cdk-lib = ">=2.133.0,<3.0.0"
constructs = ">=10.0.0,<11.0.0"
jsii = ">=1.94.0,<2.0.0"
publication = ">=0.0.3"
@@ -176,13 +176,13 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-lib"
-version = "2.131.0"
+version = "2.133.0"
description = "Version 2 of the AWS Cloud Development Kit library"
optional = false
python-versions = "~=3.8"
files = [
- {file = "aws-cdk-lib-2.131.0.tar.gz", hash = "sha256:9137a078812e173d970086595cc9a2dd45f81e82fc48ed84b9ffbfe6a9af3214"},
- {file = "aws_cdk_lib-2.131.0-py3-none-any.whl", hash = "sha256:2459f8d191ee5e04a274f62b75117d402c5a4f123dbf316da8a9dc375d266d88"},
+ {file = "aws-cdk-lib-2.133.0.tar.gz", hash = "sha256:ef09b237f6840bdaae5081b35bbc9566a9bc38aaf6bd736441db9ee06298312e"},
+ {file = "aws_cdk_lib-2.133.0-py3-none-any.whl", hash = "sha256:59c534e54e2b68ef514b41c5799f65094a0e580a6e0b5f0a275409d34d4bafc7"},
]
[package.dependencies]
@@ -196,19 +196,19 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-encryption-sdk"
-version = "3.1.1"
+version = "3.2.0"
description = "AWS Encryption SDK implementation for Python"
optional = true
python-versions = "*"
files = [
- {file = "aws-encryption-sdk-3.1.1.tar.gz", hash = "sha256:8d5fbf018fc68d6b1cacbe4dd037fd805296c7736a9fe457eb684d053f7f9563"},
- {file = "aws_encryption_sdk-3.1.1-py2.py3-none-any.whl", hash = "sha256:a3cbbf04e0b9038b9180af8b03da896af19083e00ca011dcfcb403421458ad02"},
+ {file = "aws-encryption-sdk-3.2.0.tar.gz", hash = "sha256:4304fcf8ce2aa3fa98b1acff7a3bf3cd0528c329c0c437b55e0f456bbf62347e"},
+ {file = "aws_encryption_sdk-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e3208809133b4491a5c6d8f3e6622fceb9d5b7c157c90a0f2a2e3ae4504fa31"},
]
[package.dependencies]
attrs = ">=17.4.0"
boto3 = ">=1.10.0"
-cryptography = ">=2.5.0"
+cryptography = ">=3.4.6"
wrapt = ">=1.10.11"
[[package]]
@@ -227,13 +227,13 @@ requests = ">=0.14.0"
[[package]]
name = "aws-sam-translator"
-version = "1.85.0"
+version = "1.86.0"
description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates"
optional = false
python-versions = ">=3.8, <=4.0, !=4.0"
files = [
- {file = "aws-sam-translator-1.85.0.tar.gz", hash = "sha256:e41938affa128fb5bde5e1989b260bf539a96369bba3faf316ce66651351df39"},
- {file = "aws_sam_translator-1.85.0-py3-none-any.whl", hash = "sha256:e8c69a4db7279421ff6c3579cd4d43395fe9b6781f50416528e984be68e25481"},
+ {file = "aws-sam-translator-1.86.0.tar.gz", hash = "sha256:a748dcd7886024cb7586abbbdbabe8c787c44c6547bb6602879d7bb8a6934d05"},
+ {file = "aws_sam_translator-1.86.0-py3-none-any.whl", hash = "sha256:97a44e5ac8b0d141c31f4ed35c57aa94429a0e6cef7fe989831c9a1c40455473"},
]
[package.dependencies]
@@ -279,13 +279,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "bandit"
-version = "1.7.7"
+version = "1.7.8"
description = "Security oriented static analyser for python code."
optional = false
python-versions = ">=3.8"
files = [
- {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"},
- {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"},
+ {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"},
+ {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"},
]
[package.dependencies]
@@ -296,39 +296,40 @@ stevedore = ">=1.20.0"
[package.extras]
baseline = ["GitPython (>=3.1.30)"]
+sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"]
test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"]
toml = ["tomli (>=1.1.0)"]
yaml = ["PyYAML"]
[[package]]
name = "black"
-version = "24.2.0"
+version = "24.3.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"},
- {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"},
- {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"},
- {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"},
- {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"},
- {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"},
- {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"},
- {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"},
- {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"},
- {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"},
- {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"},
- {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"},
- {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"},
- {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"},
- {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"},
- {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"},
- {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"},
- {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"},
- {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"},
- {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"},
- {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"},
- {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"},
+ {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
+ {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
+ {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
+ {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
+ {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
+ {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
+ {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
+ {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
+ {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
+ {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
+ {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
+ {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
+ {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
+ {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
+ {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
+ {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
+ {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
+ {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
+ {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
+ {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
+ {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
+ {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
]
[package.dependencies]
@@ -428,38 +429,38 @@ ujson = ["ujson (>=5.7.0)"]
[[package]]
name = "cdk-nag"
-version = "2.28.54"
+version = "2.28.72"
description = "Check CDK v2 applications for best practices using a combination on available rule packs."
optional = false
python-versions = "~=3.8"
files = [
- {file = "cdk-nag-2.28.54.tar.gz", hash = "sha256:55f17c15369e38ec1f3b8dc40ff1830bfe0452c95c35d1e37317dc36269be3ce"},
- {file = "cdk_nag-2.28.54-py3-none-any.whl", hash = "sha256:bf901e9f33d39b9d2bfa7117aef14917f708b518494f2247dbdbdba666154164"},
+ {file = "cdk-nag-2.28.72.tar.gz", hash = "sha256:f0f265c399dbcedd3293264b19707b91941318a085435c61f35e946c57024f8a"},
+ {file = "cdk_nag-2.28.72-py3-none-any.whl", hash = "sha256:0595efe44b3c0b144d3f38ac9b63be70cbb1490fda2c031bdda8af4535c63899"},
]
[package.dependencies]
aws-cdk-lib = ">=2.116.0,<3.0.0"
constructs = ">=10.0.5,<11.0.0"
-jsii = ">=1.94.0,<2.0.0"
+jsii = ">=1.96.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "cdklabs-generative-ai-cdk-constructs"
-version = "0.1.83"
+version = "0.1.104"
description = "AWS Generative AI CDK Constructs is a library for well-architected generative AI patterns."
optional = false
python-versions = "~=3.8"
files = [
- {file = "cdklabs.generative-ai-cdk-constructs-0.1.83.tar.gz", hash = "sha256:c259c531a7994a3b18bd3cbc18c112f72221a4d6a2aedf325f2364692d7a9344"},
- {file = "cdklabs.generative_ai_cdk_constructs-0.1.83-py3-none-any.whl", hash = "sha256:05bdee0e50fa518b5aa26b4c110fc166401739aacbaaebaaa98bb65e36633383"},
+ {file = "cdklabs.generative-ai-cdk-constructs-0.1.104.tar.gz", hash = "sha256:69f0b6e1fcf53f8f509d2ce2d60a18bd6b07114b07c519ab0189d3f1902b3026"},
+ {file = "cdklabs.generative_ai_cdk_constructs-0.1.104-py3-none-any.whl", hash = "sha256:6fb3862d3050be498ba3a03b0d65e1ee0df0b91b7150fc52f26d4bdde1f93b56"},
]
[package.dependencies]
aws-cdk-lib = ">=2.122.0,<3.0.0"
-cdk-nag = ">=2.28.52,<3.0.0"
+cdk-nag = ">=2.28.72,<3.0.0"
constructs = ">=10.3.0,<11.0.0"
-jsii = ">=1.94.0,<2.0.0"
+jsii = ">=1.96.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
@@ -540,17 +541,17 @@ pycparser = "*"
[[package]]
name = "cfn-lint"
-version = "0.86.0"
+version = "0.86.1"
description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved"
optional = false
-python-versions = ">=3.8, <=4.0, !=4.0"
+python-versions = "!=4.0,<=4.0,>=3.8"
files = [
- {file = "cfn-lint-0.86.0.tar.gz", hash = "sha256:7216e9c10dd27af73821d0ae79b17406cd89f5dfbc25feb5d2ba756eb6e9a651"},
- {file = "cfn_lint-0.86.0-py3-none-any.whl", hash = "sha256:70cefa0ab91e35698cd8c03a2f99a367d71d848da5a62123192552937652d542"},
+ {file = "cfn-lint-0.86.1.tar.gz", hash = "sha256:ed41e596d807fea2de74dbbfc0cb8b48f8787572c50e3b58cce05382a5af3a64"},
+ {file = "cfn_lint-0.86.1-py3-none-any.whl", hash = "sha256:e599b23f1e3745c11585008fc1d186665dded2fb0ded3cc05e30e7d7b0830082"},
]
[package.dependencies]
-aws-sam-translator = ">=1.85.0"
+aws-sam-translator = ">=1.86.0"
jschema-to-python = ">=1.2.3,<1.3.0"
jsonpatch = "*"
jsonschema = ">=3.0,<5"
@@ -714,63 +715,63 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "coverage"
-version = "7.4.3"
+version = "7.4.4"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"},
- {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"},
- {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"},
- {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"},
- {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"},
- {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"},
- {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"},
- {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"},
- {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"},
- {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"},
- {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"},
- {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"},
- {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"},
- {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"},
- {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"},
- {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"},
- {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"},
- {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"},
- {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"},
- {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"},
- {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"},
- {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"},
- {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"},
- {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"},
- {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"},
- {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"},
- {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"},
- {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"},
- {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"},
- {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"},
- {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"},
- {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"},
- {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"},
- {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"},
- {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"},
- {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"},
- {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"},
- {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"},
- {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"},
- {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"},
- {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"},
- {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"},
- {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"},
- {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"},
- {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"},
- {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"},
- {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"},
- {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"},
- {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"},
- {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"},
- {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"},
- {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"},
+ {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"},
+ {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"},
+ {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"},
+ {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"},
+ {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"},
+ {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"},
+ {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"},
+ {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"},
+ {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"},
+ {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"},
+ {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"},
+ {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"},
+ {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"},
+ {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"},
+ {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"},
+ {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"},
+ {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"},
+ {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"},
+ {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"},
+ {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"},
+ {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"},
+ {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"},
+ {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"},
+ {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"},
+ {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"},
+ {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"},
+ {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"},
+ {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"},
+ {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"},
+ {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"},
+ {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"},
+ {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"},
+ {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"},
+ {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"},
+ {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"},
+ {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"},
+ {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"},
+ {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"},
+ {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"},
+ {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"},
+ {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"},
+ {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"},
+ {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"},
+ {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"},
+ {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"},
+ {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"},
+ {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"},
+ {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"},
+ {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"},
+ {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"},
+ {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"},
+ {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"},
]
[package.dependencies]
@@ -849,18 +850,18 @@ requests = ">=2.6.0"
[[package]]
name = "datadog-lambda"
-version = "5.89.0"
+version = "5.91.0"
description = "The Datadog AWS Lambda Library"
optional = false
python-versions = ">=3.8.0,<4"
files = [
- {file = "datadog_lambda-5.89.0-py3-none-any.whl", hash = "sha256:d58d01d6d047096490de004f68ce05d5261b4dba6ebfeb41cf78b2e9afd9dee9"},
- {file = "datadog_lambda-5.89.0.tar.gz", hash = "sha256:26c2d58712de0dcf7221cdc50d7f361bef217dd99b139e3636685a22cf841764"},
+ {file = "datadog_lambda-5.91.0-py3-none-any.whl", hash = "sha256:4d7d2235f306e64ccd9f02dc54b4412be1914e643f5b92113f8fa9cfe2d176d3"},
+ {file = "datadog_lambda-5.91.0.tar.gz", hash = "sha256:cdb35ce0a730233a886d37163aeb58c736b6d6e152e5236e2eca19cca5a187f4"},
]
[package.dependencies]
datadog = ">=0.41.0,<1.0.0"
-ddtrace = ">=2.5.1"
+ddtrace = ">=2.7.2"
urllib3 = [
{version = "<2.0.0", markers = "python_version < \"3.11\""},
{version = "<2.1.0", markers = "python_version >= \"3.11\""},
@@ -887,71 +888,71 @@ six = "*"
[[package]]
name = "ddtrace"
-version = "2.7.0"
+version = "2.7.2"
description = "Datadog APM client library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "ddtrace-2.7.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7c8589a46ce4170dff7d00dbcfd553413353cb40e985079fd2a8bb0da66c82f1"},
- {file = "ddtrace-2.7.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:195d7989dd2cbfd63f14a27f6872fe784deb428694082895ec63463f475c2c64"},
- {file = "ddtrace-2.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e04538a9428d7d4cb0937e88095e883f390167fef168118ce41b96afa2d6986"},
- {file = "ddtrace-2.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94ad305f16b0c377e723e3ee65599304f2bbb8a40894c2b9ce3a7fb2db8c61a2"},
- {file = "ddtrace-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80d70b4c9e0a6a6831150ac73dadb76b0b46f15f770f89274e9b1bb371beb848"},
- {file = "ddtrace-2.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7d11da8ae2b26ad31fa6c0c0394d3d461fea302023cf28c5c026982ece832cfa"},
- {file = "ddtrace-2.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60edceb914802fbf8c762eee96aa12eb35848fbc13eb237c69de34be1094db80"},
- {file = "ddtrace-2.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8661558dc12092890459336431b34427e29c63fbf83af06731b651209821359c"},
- {file = "ddtrace-2.7.0-cp310-cp310-win32.whl", hash = "sha256:3ef9cd5cf948805678fd7741a966be635fde5ac601a7a4d15a03f8ea41251bea"},
- {file = "ddtrace-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:913a8eeb25af5721997c411214cce8502bca5cd6525d79c50b0cb90e1fbb21cf"},
- {file = "ddtrace-2.7.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6e18db8ea6a174902eedbcc0952ed57492f7fa5660d9c979bd50af0966bd8a71"},
- {file = "ddtrace-2.7.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:39669deca72eb2a07e55dd1e8d78e59b2b050a028d5f0879d60745d0d80a7c0b"},
- {file = "ddtrace-2.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9533332fe12f331333cc652dacf107329a9af0f328539d4cbea578b4a999331"},
- {file = "ddtrace-2.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd6522429c5bf51e6db26824cffa27d7e45a97fc14c2c1390ca303307c6996c"},
- {file = "ddtrace-2.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:552b4933f1b1807fedc674b9505c2f2113899f0a92c47a9b811f652c87c4c294"},
- {file = "ddtrace-2.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61be4adc2607e121fc3f576fbe6ebcee06cd618862bdbe49e699b5980d474400"},
- {file = "ddtrace-2.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ebb200c292c9126d8f74c77ae8f37d395c20ad7fd70bc7082e488a5bffaf4083"},
- {file = "ddtrace-2.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:23bd2ffa56efccc5349d04cc290447223538ab002b094e7f66d2c6ebcd2e4237"},
- {file = "ddtrace-2.7.0-cp311-cp311-win32.whl", hash = "sha256:6970c21c82db282a99958ea8ac86defda3d69acc4a1db6eb6061208e6fea5e77"},
- {file = "ddtrace-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:9da6df8b0d33ef6692ab5cec8c9db507f11168986aed3f0a34254a8b7edeb73f"},
- {file = "ddtrace-2.7.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:28cd2b4e4eb890232382d8cde1734d2ebea949817b762fec541fcf41f36ec311"},
- {file = "ddtrace-2.7.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:89d6f6a73005de56deb0ec4d0c83a32fd3cc698eabc3477a9b99608717b9868e"},
- {file = "ddtrace-2.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67e9a5b73c693e69455f598ab2c62d484952f38cf9cb714efd2c6463bf845908"},
- {file = "ddtrace-2.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a78b5c7c948809146c8cec17e5e54b0c688e60c7ff92e4fdb32ce6f42b169b8"},
- {file = "ddtrace-2.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db01ac0cfb2eafd2d4ea24c879c5d0b1b7598ba45e8da06d454e70fbd2688c20"},
- {file = "ddtrace-2.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6bd9f7fd4bd8848a2a8c618682ddeaeaaf6ea8b584eb83e74a3900d6afc6d36e"},
- {file = "ddtrace-2.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:00f6946520c91b8d5436a7b0bf2919542ab31493435558beeb6c17fddb089cb4"},
- {file = "ddtrace-2.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:98f2185ad640c7ad0a373ecfb20e15bb1348bb2f93faed1f20ee77acadbb73f6"},
- {file = "ddtrace-2.7.0-cp312-cp312-win32.whl", hash = "sha256:def38d16345cf236afc80bc0da0ad17e083ea921ac94ca37562bc996d53569df"},
- {file = "ddtrace-2.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:98de3812ef588bad53797e777aa99902252f77e21ae382d6e317258d14785e26"},
- {file = "ddtrace-2.7.0-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:0b852adc63c500c80b8ff36702c00a8ca76b910a75b052bd1281105a41655288"},
- {file = "ddtrace-2.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27d0d08eabf84f508b8b3a68fea587d6525cee7c8b602a1609007e2417d1fa5a"},
- {file = "ddtrace-2.7.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf78c3499911a18b7056aba5700e595c1bec4e18b98b21767c61dbacd084ce86"},
- {file = "ddtrace-2.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd196a8884247bca392af7a4e1202c698cc489ede48535f3149b12b802eb5842"},
- {file = "ddtrace-2.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:264dafd983598de4d05893b37ffcfa35e943f2c896a5b04b76533a811a5e66d6"},
- {file = "ddtrace-2.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ab073544aad96d5275ac18b962a71554c55ca8c04b110f5fe68646f103e81349"},
- {file = "ddtrace-2.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4ef1a6dd2c86aaea539fc2607c24751eafb0a5120c80fb051a31e81c58c04cba"},
- {file = "ddtrace-2.7.0-cp37-cp37m-win32.whl", hash = "sha256:ac59ec4ceb40287abdcec097d5f6c5ab1a7be65793cdff99c14b38c214be76b4"},
- {file = "ddtrace-2.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4f2da60724d362c7b4f8cb15623fc8d160d18a116532abbeddbcbf69e4bf00ea"},
- {file = "ddtrace-2.7.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:948539aa83c49218609be87517ef87961c0a67f9bf7c21fe2c3309d2df62fb67"},
- {file = "ddtrace-2.7.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8a2c175404361f49f674bae47229a8a7659a948249aa752888adc427fa65ec03"},
- {file = "ddtrace-2.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25c4ff71805aa4b48bda7ed56d3b63f76e9c2207a3b3f871fc309233185d7ec3"},
- {file = "ddtrace-2.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd1efe43cc3a97a1f2f634c500f69f7c4c73c9bdde5148a70dabd04bc81f05d6"},
- {file = "ddtrace-2.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56efaf8620803c9e11692be72e6eed358065230d2a2e9188ad1d127f1ae9fe6a"},
- {file = "ddtrace-2.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:993493378144dd90af2b5bada3bf4128a94aeb02e8b618fa30e86881e821fd94"},
- {file = "ddtrace-2.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef2a2d068e99f0e3f0a604553c7861d5c36bba7ae92ec5af4a74547fc2be1a9b"},
- {file = "ddtrace-2.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c61d70d9408df7ec6742d7b57bb80c7168aa38b6aebe9074d360b99be2b9f1f"},
- {file = "ddtrace-2.7.0-cp38-cp38-win32.whl", hash = "sha256:2ee477ee452801746e2614f27030c2b471cdd73129d66c754ac5e7b2a1bfbe58"},
- {file = "ddtrace-2.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec6c102bd44f76ca06195c7e3c028165fc579fddb28b27951c1efff42b5e3452"},
- {file = "ddtrace-2.7.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:876a0161344e45a21f1bf7ce7c9413477adee6d425e027ee60f421146ab00942"},
- {file = "ddtrace-2.7.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:2ee00771f4deea3c354cbd73e781450ac0db34ed1c38a68f0aa0a5cbbc501e81"},
- {file = "ddtrace-2.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52778b06ccd5f595e746547660f0e3f4ce0e0dbcbd2857cf42ec56868cbbc647"},
- {file = "ddtrace-2.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfa96b9ec4e2a6e8a6edbfa3dc210cff8fe70807d53ab36ec770f7adb5c522ca"},
- {file = "ddtrace-2.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11aa0e7573ef221b26059ca36a367d34cfd2b4a6bb38215a4c7e158367fc279d"},
- {file = "ddtrace-2.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9b0f7c93786028a1afc2f7a6aa59d58c549d9ef5e1cbc90116b8d77417aa1526"},
- {file = "ddtrace-2.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:089593e3d00180388709523384d3a4da528bb6b040f5ab43e03479d91ee95cd6"},
- {file = "ddtrace-2.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fe5392100712056d9852070612c5ae937f86fba6c99ee1493abfd8009141652"},
- {file = "ddtrace-2.7.0-cp39-cp39-win32.whl", hash = "sha256:950f57688b248d7be68e9614eee362dcda6f3d8e41ddf07ba475c0765e267a62"},
- {file = "ddtrace-2.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:d6b213a0e129ed4ef93845d1d6af7bb79e7e6319d20f1c0b87099e23d7d38512"},
- {file = "ddtrace-2.7.0.tar.gz", hash = "sha256:15702a0c4345a7610a548a22889d452b271c49100896204ab5ab0d95ae01ea52"},
+ {file = "ddtrace-2.7.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:31a0a4ffefdc6c20e9e4ef663b411ea66bd2a4113bec7f10292df00b75e883f3"},
+ {file = "ddtrace-2.7.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:fd148fce8c18a278b055f7e1b4c56e5b3214cd17fc42882dfb987826a00197d6"},
+ {file = "ddtrace-2.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0febe3be1c06b7b3ea64aa21d0a37bc06f9a4c3291e833e95687c10be459a2"},
+ {file = "ddtrace-2.7.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cc7663f1c7d42f47266ae135b4ee16773e125417597e24da86bb78ecc82f85b"},
+ {file = "ddtrace-2.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06c6b7e6f153fb739de3da62cb9d99283a2669f8ebeb92238d272803939c7433"},
+ {file = "ddtrace-2.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a7879965428bb7c6abd020031ef3a5ffcc0104b7c15f021dcc0315bc421a721a"},
+ {file = "ddtrace-2.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3d33c43e8302c72d1b2b7a854d4a17c787973e61ec76cd7fc6434839aefc7c"},
+ {file = "ddtrace-2.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fcbb686d0ffe47df42fe092e020302c912c956da742cf4787e616c8f73a26c8b"},
+ {file = "ddtrace-2.7.2-cp310-cp310-win32.whl", hash = "sha256:f9a76c303cec59216b706186e2de38ae1d650405660277fed121c7658f320cf7"},
+ {file = "ddtrace-2.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:4385b4f4f8ec7313ead4d852d8dd50cae4c45f49b3893cc6aa4a64a3b3be93b8"},
+ {file = "ddtrace-2.7.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:4dd90dc7c173edc32283b4f70937ea01ec43924a1b0af7ef6bbaa22076210860"},
+ {file = "ddtrace-2.7.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:923462adde72f363821c0c165ac78aa76236ae12022d44ad7c51b8870595bbaa"},
+ {file = "ddtrace-2.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:190c4eefc1e3c0a7befd995bf10b51451ddd497fb636fd825d7f8527e28c5864"},
+ {file = "ddtrace-2.7.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5016bc73e92adef4017e8cf7fff8a49a2c0fad8dcac600459fa30f63dbab8be"},
+ {file = "ddtrace-2.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c230035c714ed9ea3dd16d65813f539ba9c30c87294107d5f77cdddad430a086"},
+ {file = "ddtrace-2.7.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1689277365728d5735931b98ef64115d958ab76fb698472e7d92a1f71bf0000b"},
+ {file = "ddtrace-2.7.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3e3a19ae9e8e2e6aff56aa93d73a0d72ce5530c1f0347b7ebba68b5c437efe49"},
+ {file = "ddtrace-2.7.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e03cf1787728ae01cb8cd0b474b09461d47afb15a2146b1753bee80a27568d86"},
+ {file = "ddtrace-2.7.2-cp311-cp311-win32.whl", hash = "sha256:081ba7c3d876c6dde6d3f8078205e3ae06932f0dbe5cb283f9bdc99052c262de"},
+ {file = "ddtrace-2.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:0299654ce610fe4d0f73b9c599bfaacd17537d1193cc7be95fb8e5238bed0ffa"},
+ {file = "ddtrace-2.7.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:283d1ed0d496e07b80ef372f5e78d5a5aa86a70b59b1a0039d655d5796d8cd37"},
+ {file = "ddtrace-2.7.2-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a0e0ad2f20ce6942b3ceca0578be72416aacd6f63a7ef07de5a86ea524b16ad4"},
+ {file = "ddtrace-2.7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2094747698d8ffd50339b4c8142923371272a4e919a1f56cc75e8cce868ff638"},
+ {file = "ddtrace-2.7.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd239ba8a9762ef1defb7bb5c70e8b488987b462936f6f9f70a6613b35376178"},
+ {file = "ddtrace-2.7.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82170d1d5153554dcfd475c0c1ab64f315cc7f00c5cf6c6bb471025b661ecc41"},
+ {file = "ddtrace-2.7.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3140f5db9313f6a14d02a9036f16a1d5311261daec2d90160db829d08593ce1e"},
+ {file = "ddtrace-2.7.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:134194fa8e8c14798374074c5472f33479cf5220dfccea79e1abaea7f57bdef2"},
+ {file = "ddtrace-2.7.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5ce6f31d785762b80a8a8d346bdd302f15977cf0b0e13f81f4fdbf7815bae2c4"},
+ {file = "ddtrace-2.7.2-cp312-cp312-win32.whl", hash = "sha256:0e6cd36d2373345863b3664f440b0255c1313e4f7ea3ac343de38ffe5402fa90"},
+ {file = "ddtrace-2.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:8ee761d7dfa01ccfeeb81215d16da27d0cfcc47a58a6b582dfd5816bccb64005"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c3b55cb4fc6ec1994f7f1e44dfbf62f46069b16cebe8b26781a3b198c821591d"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aa91a2c729f9187a75084b2a0fce23c63a8d3181e9c33a640e9e638ddbc7079"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4ce51a957f21ae997795a9a2e9f11fb988718417012e2a5765f74e157f3099a"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb0adacd1116b1043f92382fd3dc9e7deabd6d788c15c2b1e3b0f75c4adb711"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99a6cf91b3ab290afac26fa61b81b746677b1627df12373919219fd562881c2d"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4f8ac0ac0970d65223247c879729c4c489e3cc69529b54e9dd2051efc68a007"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6052dd35bb0ec6d1023f0c4de9b0426a9e16d80fd8152d8eb8135e34bf41e1df"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-win32.whl", hash = "sha256:641e440ac175bb04e03e34543ed48a3ddfe4a393712c62deb2f2c78adb48db90"},
+ {file = "ddtrace-2.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:1687a40014873860b8c87a9a3e18dee51fa6a593e4758f973ed4cb8832b4e53a"},
+ {file = "ddtrace-2.7.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d5529a7e4a083ec1388872c5a9b41b38622a7146d27d3bdee81d701f0ac6fc38"},
+ {file = "ddtrace-2.7.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:df4b51f39b260d8706fdf5417f3f94277f76b951cbbeabdb2b3a597d5f6cd0c1"},
+ {file = "ddtrace-2.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27c6e8c3b1bf642ca74afe985985450f2ca18e686ecb4f2e0ab978ae5fc03f8f"},
+ {file = "ddtrace-2.7.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0399c670229651517338c456304a2a65ce54387b8ddecf2da7011b259c0817d"},
+ {file = "ddtrace-2.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c81a89236f3ea91ad0e9da1fef32d9420c0d4614a44ef0a2cab168444cdb0d8"},
+ {file = "ddtrace-2.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:06d840db3283999ddacc3c9d8f5e5f0e0692ce635500d51f5e7e7ed2109c989a"},
+ {file = "ddtrace-2.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e242cab1c2a153e418060f66e477e21b45cd33843959a6d000f3f9ea8a9c06a"},
+ {file = "ddtrace-2.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b51e2230d805873974af882c19026030f40aee14a8d1b55d378443659ff4463f"},
+ {file = "ddtrace-2.7.2-cp38-cp38-win32.whl", hash = "sha256:7c589ee49644d6c022928ebe49e4586b22ac40f8f841d67e01eeda4a6f61cea0"},
+ {file = "ddtrace-2.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:0974c8f36f0f1be229befede438ba91c1da715abd68091c0c0e21ec4d3d85f79"},
+ {file = "ddtrace-2.7.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b602703fff34f3397df22fdc1184fc039d89e8c5b07cc2bcc330c9b83bcc6ad6"},
+ {file = "ddtrace-2.7.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:81ff83a9cdc033175780379d83af4bb03785bfd3c71672954f00c5a7f8d0d63b"},
+ {file = "ddtrace-2.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6f7e2fa06c61f9a26b253898654a97b49b805942aee19fb7c4b95e17105c6a5"},
+ {file = "ddtrace-2.7.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0140acfd73449e8cfa090e322f76ff85f385ce4337111ed2780cd2ee62e5e4b"},
+ {file = "ddtrace-2.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9af429e4c48cae2fb6a9a51cdb6ccc2dc0cabbc9905c1ce6e9062335da0b9db"},
+ {file = "ddtrace-2.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:09330db7a2c0ed91d244ef653f0aa261153dc0820874923c325058352b5278fd"},
+ {file = "ddtrace-2.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8638ddb94d77bdf55cc64718af66b172c4ff677b57c9e59dfd9dc8f630fb3169"},
+ {file = "ddtrace-2.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0ba0668e439134b3f258ddcc3e5c1d1d8848a40954087288312557b455b6967e"},
+ {file = "ddtrace-2.7.2-cp39-cp39-win32.whl", hash = "sha256:aa3c927299aa134ccaf8821eb7284366c60e29a542d0e7738e0b7dd9182b2025"},
+ {file = "ddtrace-2.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:7e3f36e91d1a91fb083258b09fa7f887a295321b4dc928630ce748ec664e70be"},
+ {file = "ddtrace-2.7.2.tar.gz", hash = "sha256:89a0b4b30220aeb68c2845fa21e51ec9bf915a1893cf003850b9d8022e7cb72a"},
]
[package.dependencies]
@@ -1107,18 +1108,18 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc
[[package]]
name = "filelock"
-version = "3.13.1"
+version = "3.13.3"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
- {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
- {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
+ {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"},
+ {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"},
]
[package.extras]
-docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
@@ -1459,13 +1460,13 @@ pbr = "*"
[[package]]
name = "jsii"
-version = "1.95.0"
+version = "1.96.0"
description = "Python client for jsii runtime"
optional = false
python-versions = "~=3.8"
files = [
- {file = "jsii-1.95.0-py3-none-any.whl", hash = "sha256:456fe823847e1ebbb49ab4737b181cdb8eeb623058de4669283fba7bb261e8f4"},
- {file = "jsii-1.95.0.tar.gz", hash = "sha256:f2461a0c1fdd6bb400caac5825596b0068a8c3adb9184549366adcf272815172"},
+ {file = "jsii-1.96.0-py3-none-any.whl", hash = "sha256:7101f25ba9ccc66f7bbee710ece1f4f66e7c418fcdab9e8996b6573dd9bd096b"},
+ {file = "jsii-1.96.0.tar.gz", hash = "sha256:b331f22a105ba437fa171e790eebd40d19f45773c32e55b8f904413ee6d7b2d9"},
]
[package.dependencies]
@@ -1821,13 +1822,13 @@ mkdocs = ">=0.17"
[[package]]
name = "mkdocs-material"
-version = "9.5.13"
+version = "9.5.15"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mkdocs_material-9.5.13-py3-none-any.whl", hash = "sha256:5cbe17fee4e3b4980c8420a04cc762d8dc052ef1e10532abd4fce88e5ea9ce6a"},
- {file = "mkdocs_material-9.5.13.tar.gz", hash = "sha256:d8e4caae576312a88fd2609b81cf43d233cdbe36860d67a68702b018b425bd87"},
+ {file = "mkdocs_material-9.5.15-py3-none-any.whl", hash = "sha256:e5c96dec3d19491de49ca643fc1dbb92b278e43cdb816c775bc47db77d9b62fb"},
+ {file = "mkdocs_material-9.5.15.tar.gz", hash = "sha256:39f03cca45e82bf54eb7456b5a18bd252eabfdd67f237a229471484a0a4d4635"},
]
[package.dependencies]
@@ -1902,38 +1903,38 @@ dill = ">=0.3.8"
[[package]]
name = "mypy"
-version = "1.8.0"
+version = "1.9.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
- {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
- {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
- {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
- {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
- {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
- {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
- {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
- {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
- {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
- {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
- {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
- {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
- {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
- {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
- {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
- {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
- {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
- {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
- {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
- {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
- {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
- {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
- {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
- {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
- {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
- {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
+ {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"},
+ {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"},
+ {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"},
+ {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"},
+ {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"},
+ {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"},
+ {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"},
+ {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"},
+ {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"},
+ {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"},
+ {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"},
+ {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"},
+ {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"},
+ {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"},
+ {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"},
+ {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"},
+ {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"},
+ {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"},
+ {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"},
+ {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"},
+ {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"},
+ {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"},
+ {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"},
+ {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"},
+ {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"},
+ {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"},
+ {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"},
]
[package.dependencies]
@@ -1949,13 +1950,13 @@ reports = ["lxml"]
[[package]]
name = "mypy-boto3-appconfig"
-version = "1.34.0"
-description = "Type annotations for boto3.AppConfig 1.34.0 service generated with mypy-boto3-builder 7.21.0"
+version = "1.34.58"
+description = "Type annotations for boto3.AppConfig 1.34.58 service generated with mypy-boto3-builder 7.23.2"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-appconfig-1.34.0.tar.gz", hash = "sha256:9a10372e74bdba9c88f3d5cf8dd4ce6932966c731117fc48867625e37daceb1e"},
- {file = "mypy_boto3_appconfig-1.34.0-py3-none-any.whl", hash = "sha256:426cdb7fb01f186eab062f0cca41b3a5bdfdcd33843eb9cebc04116155fce819"},
+ {file = "mypy-boto3-appconfig-1.34.58.tar.gz", hash = "sha256:7f4ef77171240f2ce43de38f725852d0ee9956f12660f9063cc5eb003f0b904e"},
+ {file = "mypy_boto3_appconfig-1.34.58-py3-none-any.whl", hash = "sha256:5fe5b74bed5b61f563df1d2876ea40ac52bdd39a157c1ac0b34645a73523a7b2"},
]
[package.dependencies]
@@ -1977,13 +1978,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-cloudformation"
-version = "1.34.55"
-description = "Type annotations for boto3.CloudFormation 1.34.55 service generated with mypy-boto3-builder 7.23.2"
+version = "1.34.66"
+description = "Type annotations for boto3.CloudFormation 1.34.66 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-cloudformation-1.34.55.tar.gz", hash = "sha256:de9f4d45d0a4e57bb5ef8b8d7a03476f8ef24ffa253d636cd824cde061227439"},
- {file = "mypy_boto3_cloudformation-1.34.55-py3-none-any.whl", hash = "sha256:78c4d25ef7102bd02c8e87d32211ff60a7b474ec546e9948b46f170d341ae0a2"},
+ {file = "mypy-boto3-cloudformation-1.34.66.tar.gz", hash = "sha256:295ddb878f3f4b61489b83bd4bad949382deb8a478ff5ae66c22ca3ffbabbe95"},
+ {file = "mypy_boto3_cloudformation-1.34.66-py3-none-any.whl", hash = "sha256:e0080e4ba0cd464479f3ee285faddd863e40147e8f331b78445aef210afc91ed"},
]
[package.dependencies]
@@ -2005,13 +2006,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-dynamodb"
-version = "1.34.57"
-description = "Type annotations for boto3.DynamoDB 1.34.57 service generated with mypy-boto3-builder 7.23.2"
+version = "1.34.67"
+description = "Type annotations for boto3.DynamoDB 1.34.67 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-dynamodb-1.34.57.tar.gz", hash = "sha256:8ad0eaa33811eb7eb0f8e2bcc2dbcefa86c91d7a21b13af500c3a8bacbe99db0"},
- {file = "mypy_boto3_dynamodb-1.34.57-py3-none-any.whl", hash = "sha256:3f957d604d7289834e549c236d77c229ab8fab107b8a2c9f8a07066348fbd3a3"},
+ {file = "mypy-boto3-dynamodb-1.34.67.tar.gz", hash = "sha256:09447ef3ea6bdfe0be4e32ca23283820573341d340bea3065ded2153cc593d22"},
+ {file = "mypy_boto3_dynamodb-1.34.67-py3-none-any.whl", hash = "sha256:081ee9e184c0c2d93f648b25cec798e75533af26e631fbe80259f48fddb89758"},
]
[package.dependencies]
@@ -2019,13 +2020,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-lambda"
-version = "1.34.46"
-description = "Type annotations for boto3.Lambda 1.34.46 service generated with mypy-boto3-builder 7.23.1"
+version = "1.34.58"
+description = "Type annotations for boto3.Lambda 1.34.58 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-lambda-1.34.46.tar.gz", hash = "sha256:275297944c5e36a170b37ce70229f21db6dd3561606799f18d96e36ac5df6876"},
- {file = "mypy_boto3_lambda-1.34.46-py3-none-any.whl", hash = "sha256:a12232002e04ee06b413b47068bc6bb085aeaa3693d28e9bf0efd76fa6953a0b"},
+ {file = "mypy-boto3-lambda-1.34.58.tar.gz", hash = "sha256:903822c74bd1b34748eb2d72eab0f132fbc3b392c8041aa8fe4d9552a44b0c65"},
+ {file = "mypy_boto3_lambda-1.34.58-py3-none-any.whl", hash = "sha256:6ab1b9611ff396e9310ad77f02994a6e03d40c8a0eea51085e355b4fd2a0cbc9"},
]
[package.dependencies]
@@ -2033,13 +2034,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-logs"
-version = "1.34.36"
-description = "Type annotations for boto3.CloudWatchLogs 1.34.36 service generated with mypy-boto3-builder 7.23.1"
+version = "1.34.66"
+description = "Type annotations for boto3.CloudWatchLogs 1.34.66 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-logs-1.34.36.tar.gz", hash = "sha256:1549b54da88a869852458e186e589449d53a7805354b5afd7d53b4d275c53c2e"},
- {file = "mypy_boto3_logs-1.34.36-py3-none-any.whl", hash = "sha256:c58cc7d498e24b963d4c78700a3e82d5af82e62d6694e30237c166a1fd6d326f"},
+ {file = "mypy-boto3-logs-1.34.66.tar.gz", hash = "sha256:cf5fac4801dd92f05007fb1b4444ff98258544d1f21e64e9228e34188046f841"},
+ {file = "mypy_boto3_logs-1.34.66-py3-none-any.whl", hash = "sha256:53c4988f655e21d2834dadcc600f3c182f34924c37d7a25bbd1b10857acb8b18"},
]
[package.dependencies]
@@ -2047,13 +2048,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-s3"
-version = "1.34.14"
-description = "Type annotations for boto3.S3 1.34.14 service generated with mypy-boto3-builder 7.21.0"
+version = "1.34.65"
+description = "Type annotations for boto3.S3 1.34.65 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-s3-1.34.14.tar.gz", hash = "sha256:71c39ab0623cdb442d225b71c1783f6a513cff4c4a13505a2efbb2e3aff2e965"},
- {file = "mypy_boto3_s3-1.34.14-py3-none-any.whl", hash = "sha256:f9669ecd182d5bf3532f5f2dcc5e5237776afe157ad5a0b37b26d6bec5fcc432"},
+ {file = "mypy-boto3-s3-1.34.65.tar.gz", hash = "sha256:2fcdf412ce2924b2f0b34db59abf06a9c0bbe4cd3361f14f0d2c1e211c0f7ddd"},
+ {file = "mypy_boto3_s3-1.34.65-py3-none-any.whl", hash = "sha256:2aecfbe1c00654bc21f839068218d60123366954bf43a708baa50f9543e3f205"},
]
[package.dependencies]
@@ -2061,13 +2062,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-secretsmanager"
-version = "1.34.43"
-description = "Type annotations for boto3.SecretsManager 1.34.43 service generated with mypy-boto3-builder 7.23.1"
+version = "1.34.63"
+description = "Type annotations for boto3.SecretsManager 1.34.63 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-secretsmanager-1.34.43.tar.gz", hash = "sha256:abbf560775c2fe0dc383b7f70c16a1bf753d9b3ffc0caa5e35447e685783a68b"},
- {file = "mypy_boto3_secretsmanager-1.34.43-py3-none-any.whl", hash = "sha256:64e9df58f71072f0a912ecaca626683f4536da078caa204ac07928c4b1481b8b"},
+ {file = "mypy-boto3-secretsmanager-1.34.63.tar.gz", hash = "sha256:a193373d718f747d53878b5cc7fef2e9b8fde3892e4788ac690fd2b16a35564c"},
+ {file = "mypy_boto3_secretsmanager-1.34.63-py3-none-any.whl", hash = "sha256:3ed3587f9b5a5eb4e8c81edeeaa0b4a9118d8f07cfc4915c137f8edc315028ab"},
]
[package.dependencies]
@@ -2075,13 +2076,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
[[package]]
name = "mypy-boto3-ssm"
-version = "1.34.47"
-description = "Type annotations for boto3.SSM 1.34.47 service generated with mypy-boto3-builder 7.23.1"
+version = "1.34.61"
+description = "Type annotations for boto3.SSM 1.34.61 service generated with mypy-boto3-builder 7.23.2"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-boto3-ssm-1.34.47.tar.gz", hash = "sha256:be70cc32f9a07e6701746ebe65fba14d59c3f24a8511d275fd8322c9365f2270"},
- {file = "mypy_boto3_ssm-1.34.47-py3-none-any.whl", hash = "sha256:6517b1dc01e3ffe48a251c91e2a7fb6801223baf4a8cf1600411f9e132422297"},
+ {file = "mypy-boto3-ssm-1.34.61.tar.gz", hash = "sha256:4cbc99f42b6913c536c6cc41c02d3f165920d1e7babb9bb17782b410556cde00"},
+ {file = "mypy_boto3_ssm-1.34.61-py3-none-any.whl", hash = "sha256:8ce8d6fc185c07161b5cc3a589df10130d6bbe3182aeeb01549eb95b5580f51c"},
]
[package.dependencies]
@@ -2395,13 +2396,13 @@ extra = ["pygments (>=2.12)"]
[[package]]
name = "pytest"
-version = "8.0.2"
+version = "8.1.1"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"},
- {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"},
+ {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
+ {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
]
[package.dependencies]
@@ -2409,21 +2410,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
-pluggy = ">=1.3.0,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+pluggy = ">=1.4,<2.0"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
-version = "0.23.5"
+version = "0.23.6"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"},
- {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"},
+ {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"},
+ {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"},
]
[package.dependencies]
@@ -2455,13 +2456,13 @@ histogram = ["pygal", "pygaljs"]
[[package]]
name = "pytest-cov"
-version = "4.1.0"
+version = "5.0.0"
description = "Pytest plugin for measuring coverage."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
- {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
]
[package.dependencies]
@@ -2469,21 +2470,21 @@ coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-mock"
-version = "3.12.0"
+version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
- {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]
[package.dependencies]
-pytest = ">=5.0"
+pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
@@ -2664,17 +2665,17 @@ toml = ["tomli (>=2.0.1)"]
[[package]]
name = "redis"
-version = "5.0.2"
+version = "5.0.3"
description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.7"
files = [
- {file = "redis-5.0.2-py3-none-any.whl", hash = "sha256:4caa8e1fcb6f3c0ef28dba99535101d80934b7d4cd541bbb47f4a3826ee472d1"},
- {file = "redis-5.0.2.tar.gz", hash = "sha256:3f82cc80d350e93042c8e6e7a5d0596e4dd68715babffba79492733e1f367037"},
+ {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"},
+ {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"},
]
[package.dependencies]
-async-timeout = ">=4.0.3"
+async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""}
[package.extras]
hiredis = ["hiredis (>=1.0.0)"]
@@ -2960,28 +2961,28 @@ files = [
[[package]]
name = "ruff"
-version = "0.3.0"
+version = "0.3.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7deb528029bacf845bdbb3dbb2927d8ef9b4356a5e731b10eef171e3f0a85944"},
- {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e1e0d4381ca88fb2b73ea0766008e703f33f460295de658f5467f6f229658c19"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f7dbba46e2827dfcb0f0cc55fba8e96ba7c8700e0a866eb8cef7d1d66c25dcb"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23dbb808e2f1d68eeadd5f655485e235c102ac6f12ad31505804edced2a5ae77"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ef655c51f41d5fa879f98e40c90072b567c666a7114fa2d9fe004dffba00932"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d0d3d7ef3d4f06433d592e5f7d813314a34601e6c5be8481cccb7fa760aa243e"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b08b356d06a792e49a12074b62222f9d4ea2a11dca9da9f68163b28c71bf1dd4"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9343690f95710f8cf251bee1013bf43030072b9f8d012fbed6ad702ef70d360a"},
- {file = "ruff-0.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1f3ed501a42f60f4dedb7805fa8d4534e78b4e196f536bac926f805f0743d49"},
- {file = "ruff-0.3.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:cc30a9053ff2f1ffb505a585797c23434d5f6c838bacfe206c0e6cf38c921a1e"},
- {file = "ruff-0.3.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5da894a29ec018a8293d3d17c797e73b374773943e8369cfc50495573d396933"},
- {file = "ruff-0.3.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:755c22536d7f1889be25f2baf6fedd019d0c51d079e8417d4441159f3bcd30c2"},
- {file = "ruff-0.3.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd73fe7f4c28d317855da6a7bc4aa29a1500320818dd8f27df95f70a01b8171f"},
- {file = "ruff-0.3.0-py3-none-win32.whl", hash = "sha256:19eacceb4c9406f6c41af806418a26fdb23120dfe53583df76d1401c92b7c14b"},
- {file = "ruff-0.3.0-py3-none-win_amd64.whl", hash = "sha256:128265876c1d703e5f5e5a4543bd8be47c73a9ba223fd3989d4aa87dd06f312f"},
- {file = "ruff-0.3.0-py3-none-win_arm64.whl", hash = "sha256:e3a4a6d46aef0a84b74fcd201a4401ea9a6cd85614f6a9435f2d33dd8cefbf83"},
- {file = "ruff-0.3.0.tar.gz", hash = "sha256:0886184ba2618d815067cf43e005388967b67ab9c80df52b32ec1152ab49f53a"},
+ {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"},
+ {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"},
+ {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"},
+ {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"},
+ {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"},
+ {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"},
+ {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"},
+ {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"},
]
[[package]]
@@ -3018,13 +3019,13 @@ pbr = "*"
[[package]]
name = "sentry-sdk"
-version = "1.41.0"
+version = "1.43.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
files = [
- {file = "sentry-sdk-1.41.0.tar.gz", hash = "sha256:4f2d6c43c07925d8cd10dfbd0970ea7cb784f70e79523cca9dbcd72df38e5a46"},
- {file = "sentry_sdk-1.41.0-py2.py3-none-any.whl", hash = "sha256:be4f8f4b29a80b6a3b71f0f31487beb9e296391da20af8504498a328befed53f"},
+ {file = "sentry-sdk-1.43.0.tar.gz", hash = "sha256:41df73af89d22921d8733714fb0fc5586c3461907e06688e6537d01a27e0e0f6"},
+ {file = "sentry_sdk-1.43.0-py2.py3-none-any.whl", hash = "sha256:8d768724839ca18d7b4c7463ef7528c40b7aa2bfbf7fe554d5f9a7c044acfd36"},
]
[package.dependencies]
@@ -3038,6 +3039,7 @@ asyncpg = ["asyncpg (>=0.23)"]
beam = ["apache-beam (>=2.12)"]
bottle = ["bottle (>=0.12.13)"]
celery = ["celery (>=3)"]
+celery-redbeat = ["celery-redbeat (>=2)"]
chalice = ["chalice (>=1.16.0)"]
clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
django = ["django (>=1.8)"]
@@ -3048,6 +3050,7 @@ grpcio = ["grpcio (>=1.21.1)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
loguru = ["loguru (>=0.5)"]
+openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
pure-eval = ["asttokens", "executing", "pure-eval"]
@@ -3230,24 +3233,24 @@ cryptography = ">=35.0.0"
[[package]]
name = "types-python-dateutil"
-version = "2.8.19.20240106"
+version = "2.9.0.20240316"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"},
- {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"},
+ {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"},
+ {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"},
]
[[package]]
name = "types-redis"
-version = "4.6.0.20240218"
+version = "4.6.0.20240311"
description = "Typing stubs for redis"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-redis-4.6.0.20240218.tar.gz", hash = "sha256:5103d7e690e5c74c974a161317b2d59ac2303cf8bef24175b04c2a4c3486cb39"},
- {file = "types_redis-4.6.0.20240218-py3-none-any.whl", hash = "sha256:dc9c45a068240e33a04302aec5655cf41e80f91eecffccbb2df215b2f6fc375d"},
+ {file = "types-redis-4.6.0.20240311.tar.gz", hash = "sha256:e049bbdff0e0a1f8e701b64636811291d21bff79bf1e7850850a44055224a85f"},
+ {file = "types_redis-4.6.0.20240311-py3-none-any.whl", hash = "sha256:6b9d68a29aba1ee400c823d8e5fe88675282eb69d7211e72fe65dbe54b33daca"},
]
[package.dependencies]
@@ -3512,4 +3515,4 @@ validation = ["fastjsonschema"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8,<4.0.0"
-content-hash = "70538aa75ee90d7aef93e61ec119c372fecbf957931dd640def961eb817bf7e2"
+content-hash = "06ded72b67c32472b583b6be33c596f2caf71327cf931280f8aa5c7a4314898b"
diff --git a/pyproject.toml b/pyproject.toml
index ab551763915..c999883c258 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "2.35.0"
+version = "2.36.0"
description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity."
authors = ["Amazon Web Services"]
include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
@@ -51,48 +51,48 @@ jsonpath-ng = { version = "^1.6.0", optional = true }
[tool.poetry.dev-dependencies]
coverage = { extras = ["toml"], version = "^7.4" }
-pytest = "^8.0.1"
-black = "^24.2"
+pytest = "^8.1.1"
+black = "^24.3"
boto3 = "^1.26.164"
isort = "^5.13.2"
-pytest-cov = "^4.1.0"
-pytest-mock = "^3.11.1"
+pytest-cov = "^5.0.0"
+pytest-mock = "^3.14.0"
pdoc3 = "^0.10.0"
-pytest-asyncio = "^0.23.5"
-bandit = "^1.7.5"
+pytest-asyncio = "^0.23.6"
+bandit = "^1.7.8"
radon = "^6.0.1"
xenon = "^0.9.1"
mkdocs-git-revision-date-plugin = "^0.3.2"
mike = "^1.1.2"
pytest-xdist = "^3.5.0"
-aws-cdk-lib = "^2.131.0"
+aws-cdk-lib = "^2.133.0"
"aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0"
"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0"
"aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0"
-"aws-cdk.aws-lambda-python-alpha" = "^2.130.0a0"
-"cdklabs.generative-ai-cdk-constructs" = "^0.1.83"
+"aws-cdk.aws-lambda-python-alpha" = "^2.133.0a0"
+"cdklabs.generative-ai-cdk-constructs" = "^0.1.104"
pytest-benchmark = "^4.0.0"
-mypy-boto3-appconfig = "^1.34.0"
-mypy-boto3-cloudformation = "^1.34.32"
+mypy-boto3-appconfig = "^1.34.58"
+mypy-boto3-cloudformation = "^1.34.66"
mypy-boto3-cloudwatch = "^1.34.40"
-mypy-boto3-dynamodb = "^1.34.57"
-mypy-boto3-lambda = "^1.34.46"
-mypy-boto3-logs = "^1.34.16"
-mypy-boto3-secretsmanager = "^1.34.43"
-mypy-boto3-ssm = "^1.34.47"
-mypy-boto3-s3 = "^1.34.14"
+mypy-boto3-dynamodb = "^1.34.67"
+mypy-boto3-lambda = "^1.34.58"
+mypy-boto3-logs = "^1.34.66"
+mypy-boto3-secretsmanager = "^1.34.63"
+mypy-boto3-ssm = "^1.34.61"
+mypy-boto3-s3 = "^1.34.65"
mypy-boto3-xray = "^1.34.0"
types-requests = "^2.31.0"
typing-extensions = "^4.6.2"
-mkdocs-material = "^9.5.13"
-filelock = "^3.12.2"
+mkdocs-material = "^9.5.15"
+filelock = "^3.13.3"
checksumdir = "^1.2.0"
mypy-boto3-appconfigdata = "^1.34.24"
ijson = "^3.2.2"
typed-ast = { version = "^1.5.5", python = "< 3.8" }
hvac = "^2.1.0"
aws-requests-auth = "^0.4.3"
-datadog-lambda = "^5.89.0"
+datadog-lambda = "^5.91.0"
[tool.poetry.extras]
parser = ["pydantic"]
@@ -110,12 +110,12 @@ datadog = ["datadog-lambda"]
datamasking = ["aws-encryption-sdk", "jsonpath-ng"]
[tool.poetry.group.dev.dependencies]
-cfn-lint = "0.86.0"
+cfn-lint = "0.86.1"
mypy = "^1.1.1"
types-python-dateutil = "^2.8.19.6"
httpx = ">=0.23.3,<0.28.0"
sentry-sdk = "^1.22.2"
-ruff = ">=0.0.272,<0.3.1"
+ruff = ">=0.0.272,<0.3.5"
retry2 = "^0.9.5"
pytest-socket = ">=0.6,<0.8"
types-redis = "^4.6.0.7"
diff --git a/tests/events/cloudWatchAlarmEventCompositeMetric.json b/tests/events/cloudWatchAlarmEventCompositeMetric.json
new file mode 100644
index 00000000000..67200c10edb
--- /dev/null
+++ b/tests/events/cloudWatchAlarmEventCompositeMetric.json
@@ -0,0 +1,30 @@
+{
+ "source":"aws.cloudwatch",
+ "alarmArn":"arn:aws:cloudwatch:us-east-1:111122223333:alarm:SuppressionDemo.Main",
+ "accountId":"111122223333",
+ "time":"2023-08-04T12:56:46.138+0000",
+ "region":"us-east-1",
+ "alarmData":{
+ "alarmName":"CompositeDemo.Main",
+ "state":{
+ "value":"ALARM",
+ "reason":"arn:aws:cloudwatch:us-east-1:111122223333:alarm:CompositeDemo.FirstChild transitioned to ALARM at Friday 04 August, 2023 12:54:46 UTC",
+ "reasonData":"{\"triggeringAlarms\":[{\"arn\":\"arn:aws:cloudwatch:us-east-1:111122223333:alarm:CompositeDemo.FirstChild\",\"state\":{\"value\":\"ALARM\",\"timestamp\":\"2023-08-04T12:54:46.138+0000\"}}]}",
+ "timestamp":"2023-08-04T12:56:46.138+0000"
+ },
+ "previousState":{
+ "value":"ALARM",
+ "reason":"arn:aws:cloudwatch:us-east-1:111122223333:alarm:CompositeDemo.FirstChild transitioned to ALARM at Friday 04 August, 2023 12:54:46 UTC",
+ "reasonData":"{\"triggeringAlarms\":[{\"arn\":\"arn:aws:cloudwatch:us-east-1:111122223333:alarm:CompositeDemo.FirstChild\",\"state\":{\"value\":\"ALARM\",\"timestamp\":\"2023-08-04T12:54:46.138+0000\"}}]}",
+ "timestamp":"2023-08-04T12:54:46.138+0000",
+ "actionsSuppressedBy":"WaitPeriod",
+ "actionsSuppressedReason":"Actions suppressed by WaitPeriod"
+ },
+ "configuration":{
+ "alarmRule":"ALARM(CompositeDemo.FirstChild) OR ALARM(CompositeDemo.SecondChild)",
+ "actionsSuppressor":"CompositeDemo.ActionsSuppressor",
+ "actionsSuppressorWaitPeriod":120,
+ "actionsSuppressorExtensionPeriod":180
+ }
+ }
+}
diff --git a/tests/events/cloudWatchAlarmEventSingleMetric.json b/tests/events/cloudWatchAlarmEventSingleMetric.json
new file mode 100644
index 00000000000..fa5089cd6b5
--- /dev/null
+++ b/tests/events/cloudWatchAlarmEventSingleMetric.json
@@ -0,0 +1,59 @@
+{
+ "source": "aws.cloudwatch",
+ "alarmArn": "arn:aws:cloudwatch:eu-west-1:912397435824:alarm:test_alarm",
+ "accountId": "123456789012",
+ "time": "2024-02-17T11:53:08.431+0000",
+ "region": "eu-west-1",
+ "alarmData": {
+ "alarmName": "Test alert",
+ "state": {
+ "value": "ALARM",
+ "reason": "Threshold Crossed: 1 out of the last 1 datapoints [1.0 (17/02/24 11:51:00)] was less than the threshold (10.0) (minimum 1 datapoint for OK -> ALARM transition).",
+ "reasonData": "{\"version\":\"1.0\",\"queryDate\":\"2024-02-17T11:53:08.423+0000\",\"startDate\":\"2024-02-17T11:51:00.000+0000\",\"statistic\":\"SampleCount\",\"period\":60,\"recentDatapoints\":[1.0],\"threshold\":10.0,\"evaluatedDatapoints\":[{\"timestamp\":\"2024-02-17T11:51:00.000+0000\",\"sampleCount\":1.0,\"value\":1.0}]}",
+ "timestamp": "2024-02-17T11:53:08.431+0000"
+ },
+ "previousState": {
+ "value": "OK",
+ "reason": "Threshold Crossed: 1 out of the last 1 datapoints [1.0 (17/02/24 11:50:00)] was not greater than the threshold (10.0) (minimum 1 datapoint for ALARM -> OK transition).",
+ "reasonData": "{\"version\":\"1.0\",\"queryDate\":\"2024-02-17T11:51:31.460+0000\",\"startDate\":\"2024-02-17T11:50:00.000+0000\",\"statistic\":\"SampleCount\",\"period\":60,\"recentDatapoints\":[1.0],\"threshold\":10.0,\"evaluatedDatapoints\":[{\"timestamp\":\"2024-02-17T11:50:00.000+0000\",\"sampleCount\":1.0,\"value\":1.0}]}",
+ "timestamp": "2024-02-17T11:51:31.462+0000"
+ },
+ "configuration": {
+ "description": "This is description **here**",
+ "metrics": [
+ {
+ "id": "e1",
+ "expression": "m1/m2",
+ "label": "Expression1",
+ "returnData": true
+ },
+ {
+ "id": "m1",
+ "metricStat": {
+ "metric": {
+ "namespace": "AWS/Lambda",
+ "name": "Invocations",
+ "dimensions": {}
+ },
+ "period": 60,
+ "stat": "SampleCount"
+ },
+ "returnData": false
+ },
+ {
+ "id": "m2",
+ "metricStat": {
+ "metric": {
+ "namespace": "AWS/Lambda",
+ "name": "Duration",
+ "dimensions": {}
+ },
+ "period": 60,
+ "stat": "SampleCount"
+ },
+ "returnData": false
+ }
+ ]
+ }
+ }
+}
diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py
index fa166bac77e..3929496be50 100644
--- a/tests/functional/event_handler/test_api_gateway.py
+++ b/tests/functional/event_handler/test_api_gateway.py
@@ -1504,6 +1504,36 @@ def get_lambda(param: int): ...
assert result["body"] == '{"msg":"Invalid data. Number of errors: 1"}'
+def test_exception_handler_with_route():
+ app = ApiGatewayResolver()
+ # GIVEN a Router object with an exception handler defined for ValueError
+ router = Router()
+
+ @router.exception_handler(ValueError)
+ def handle_value_error(ex: ValueError):
+ print(f"request path is '{app.current_event.path}'")
+ return Response(
+ status_code=418,
+ content_type=content_types.TEXT_HTML,
+ body=str(ex),
+ )
+
+ @router.get("/my/path")
+ def get_lambda() -> Response:
+ raise ValueError("Foo!")
+
+ app.include_router(router)
+
+ # WHEN calling the event handler
+ # AND a ValueError is raised
+ result = app(LOAD_GW_EVENT, {})
+
+ # THEN call the exception_handler from Router
+ assert result["statusCode"] == 418
+ assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML]
+ assert result["body"] == "Foo!"
+
+
def test_data_validation_error():
# GIVEN a resolver without an exception handler
app = ApiGatewayResolver(enable_validation=True)
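For context, the Router-level exception handler exercised above can be used as in the following minimal sketch; the `/orders` route and handler names are illustrative assumptions, not part of this change.

```python
from aws_lambda_powertools.event_handler import (
    ApiGatewayResolver,
    Response,
    Router,
    content_types,
)

app = ApiGatewayResolver()
router = Router()

@router.exception_handler(ValueError)
def handle_value_error(ex: ValueError):
    # Handlers registered on the Router take effect once it is included in the app
    return Response(status_code=400, content_type=content_types.TEXT_PLAIN, body=str(ex))

@router.get("/orders")
def get_orders() -> Response:
    raise ValueError("invalid order id")  # caught by handle_value_error

app.include_router(router)

def lambda_handler(event, context):
    return app.resolve(event, context)
```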
diff --git a/tests/functional/event_handler/test_openapi_params.py b/tests/functional/event_handler/test_openapi_params.py
index 38b0cbed307..2ac9c036f3f 100644
--- a/tests/functional/event_handler/test_openapi_params.py
+++ b/tests/functional/event_handler/test_openapi_params.py
@@ -52,7 +52,7 @@ def handler():
assert JSON_CONTENT_TYPE in response.content
json_response = response.content[JSON_CONTENT_TYPE]
- assert json_response.schema_ == Schema()
+ assert json_response.schema_ is None
assert not json_response.examples
assert not json_response.encoding
diff --git a/tests/functional/event_handler/test_openapi_responses.py b/tests/functional/event_handler/test_openapi_responses.py
index be5d9bca288..21a71d7dee3 100644
--- a/tests/functional/event_handler/test_openapi_responses.py
+++ b/tests/functional/event_handler/test_openapi_responses.py
@@ -50,8 +50,34 @@ def handler():
assert 202 in responses.keys()
assert responses[202].description == "Custom response"
- assert 200 not in responses.keys()
- assert 422 not in responses.keys()
+ assert 200 not in responses.keys() # 200 was not added due to custom responses
+ assert 422 in responses.keys() # 422 is always added due to potential data validation errors
+
+
+def test_openapi_422_default_response():
+ app = APIGatewayRestResolver(enable_validation=True)
+
+ @app.get("/")
+ def handler():
+ return {"message": "hello world"}
+
+ schema = app.get_openapi_schema()
+ responses = schema.paths["/"].get.responses
+ assert 422 in responses.keys()
+ assert responses[422].description == "Validation Error"
+
+
+def test_openapi_422_custom_response():
+ app = APIGatewayRestResolver(enable_validation=True)
+
+ @app.get("/", responses={422: {"description": "Custom validation response"}})
+ def handler():
+ return {"message": "hello world"}
+
+ schema = app.get_openapi_schema()
+ responses = schema.paths["/"].get.responses
+ assert 422 in responses.keys()
+ assert responses[422].description == "Custom validation response"
def test_openapi_200_custom_schema():
diff --git a/tests/functional/event_handler/test_openapi_swagger.py b/tests/functional/event_handler/test_openapi_swagger.py
index 45e908742b4..82c9b4874d0 100644
--- a/tests/functional/event_handler/test_openapi_swagger.py
+++ b/tests/functional/event_handler/test_openapi_swagger.py
@@ -18,6 +18,18 @@ def test_openapi_swagger():
assert result["multiValueHeaders"]["Content-Type"] == ["text/html"]
+def test_openapi_swagger_compressed():
+ app = APIGatewayRestResolver(enable_validation=True)
+ app.enable_swagger(compress=True)
+ LOAD_GW_EVENT["headers"] = {"Accept-Encoding": "gzip, deflate, br"}
+ LOAD_GW_EVENT["path"] = "/swagger"
+ result = app(LOAD_GW_EVENT, {})
+ assert result["statusCode"] == 200
+ assert result["isBase64Encoded"]
+ assert result["multiValueHeaders"]["Content-Type"] == ["text/html"]
+ assert result["multiValueHeaders"]["Content-Encoding"] == ["gzip"]
+
+
def test_openapi_swagger_with_custom_base_url():
app = APIGatewayRestResolver(enable_validation=True)
app.enable_swagger(swagger_base_url="https://aws.amazon.com")
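For context, a minimal sketch of the Swagger compression flag the new test covers; everything beyond the `compress=True` argument is the existing public API.

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver(enable_validation=True)
# With compress=True, a request sending "Accept-Encoding: gzip" receives the
# Swagger UI HTML gzip-compressed and base64-encoded, as the test asserts.
app.enable_swagger(path="/swagger", compress=True)

def lambda_handler(event, context):
    return app.resolve(event, context)
```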
diff --git a/tests/functional/metrics/test_metrics_cloudwatch_emf.py b/tests/functional/metrics/test_metrics_cloudwatch_emf.py
index d3da81798b6..a3dfa518400 100644
--- a/tests/functional/metrics/test_metrics_cloudwatch_emf.py
+++ b/tests/functional/metrics/test_metrics_cloudwatch_emf.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import datetime
import json
import warnings
from collections import namedtuple
@@ -56,6 +57,7 @@ def serialize_single_metric(
dimension: Dict,
namespace: str,
metadata: Dict | None = None,
+ timestamp: int | datetime.datetime | None = None,
) -> CloudWatchEMFOutput:
"""Helper function to build EMF object from a given metric, dimension and namespace"""
my_metrics = AmazonCloudWatchEMFProvider(namespace=namespace)
@@ -65,6 +67,9 @@ def serialize_single_metric(
if metadata is not None:
my_metrics.add_metadata(**metadata)
+ if timestamp:
+ my_metrics.set_timestamp(timestamp)
+
return my_metrics.serialize_metric_set()
@@ -142,6 +147,28 @@ def test_single_metric_default_dimensions(capsys, metric, dimension, namespace):
assert expected == output
+def test_single_metric_with_custom_timestamp(capsys, metric, dimension, namespace):
+ # GIVEN we provide a custom timestamp
+ # WHEN using single_metric context manager
+
+ default_dimensions = {dimension["name"]: dimension["value"]}
+
+ timestamp = int((datetime.datetime.now() - datetime.timedelta(days=2)).timestamp() * 1000)
+ with single_metric(
+ namespace=namespace,
+ default_dimensions=default_dimensions,
+ **metric,
+ ) as my_metric:
+ my_metric.set_timestamp(timestamp)
+ my_metric.add_metric(name="second_metric", unit="Count", value=1)
+
+ output = capture_metrics_output(capsys)
+ expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace, timestamp=timestamp)
+
+ # THEN the custom timestamp should be added to the metric
+ assert expected == output
+
+
def test_single_metric_default_dimensions_inherit(capsys, metric, dimension, namespace):
# GIVEN we provide Metrics default dimensions
# WHEN using single_metric context manager
@@ -1213,3 +1240,75 @@ def lambda_handler(evt, ctx):
output = capture_metrics_output_multiple_emf_objects(capsys)
assert len(output) == 2
+
+
+@pytest.mark.parametrize(
+ "timestamp",
+ [int((datetime.datetime.now() - datetime.timedelta(days=2)).timestamp() * 1000), 1711105187000],
+)
+def test_metric_with_custom_timestamp(namespace, metric, capsys, timestamp):
+ # GIVEN Metrics instance is initialized
+ my_metrics = Metrics(namespace=namespace)
+
+ # Use the parametrized timestamp (either 2 days in the past or a fixed epoch in milliseconds)
+ metric_timestamp = timestamp
+
+ # WHEN we set a custom timestamp before flushing the metric
+ @my_metrics.log_metrics
+ def lambda_handler(evt, ctx):
+ my_metrics.add_metric(**metric)
+ my_metrics.set_timestamp(metric_timestamp)
+
+ lambda_handler({}, {})
+ invocation = capture_metrics_output(capsys)
+
+ # THEN Timestamp must be the custom value
+ assert invocation["_aws"]["Timestamp"] == metric_timestamp
+
+
+def test_metric_custom_timestamp_more_than_14days_ago(namespace, metric):
+ # GIVEN Metrics instance is initialized
+ my_metrics = Metrics(namespace=namespace)
+
+ # Setting a timestamp outside the allowed constraints: 20 days in the past
+ metric_timestamp = int((datetime.datetime.now() - datetime.timedelta(days=20)).timestamp() * 1000)
+
+ # WHEN we set an out-of-range timestamp before flushing the metric
+ @my_metrics.log_metrics
+ def lambda_handler(evt, ctx):
+ my_metrics.add_metric(**metric)
+ my_metrics.set_timestamp(metric_timestamp)
+
+ # THEN a warning should be raised
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("default")
+ lambda_handler({}, {})
+ assert len(w) == 1
+ assert str(w[-1].message) == (
+ "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. "
+ "Ensure the timestamp is within 14 days past or 2 hours future."
+ )
+
+
+def test_metric_custom_timestamp_with_wrong_type(namespace, metric):
+ # GIVEN Metrics instance is initialized
+ my_metrics = Metrics(namespace=namespace)
+
+ # Setting a timestamp with an invalid type (string instead of int/datetime)
+ metric_timestamp = "timestamp_as_string"
+
+ # WHEN we set a wrongly-typed timestamp before flushing the metric
+ @my_metrics.log_metrics
+ def lambda_handler(evt, ctx):
+ my_metrics.add_metric(**metric)
+ my_metrics.set_timestamp(metric_timestamp)
+
+ # THEN a warning should be raised
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("default")
+ lambda_handler({}, {})
+ assert len(w) == 1
+ assert str(w[-1].message) == (
+ "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. "
+ "Ensure the timestamp is within 14 days past or 2 hours future."
+ )
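For context, a minimal sketch of the `set_timestamp()` API these tests cover; namespace and metric names are illustrative. Per the warning asserted above, CloudWatch only accepts timestamps within 14 days in the past or 2 hours in the future.

```python
import datetime

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp")

@metrics.log_metrics
def lambda_handler(event, context):
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    # Epoch milliseconds, two days in the past (inside the accepted window)
    two_days_ago = int((datetime.datetime.now() - datetime.timedelta(days=2)).timestamp() * 1000)
    metrics.set_timestamp(two_days_ago)
```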
diff --git a/tests/functional/test_tracing.py b/tests/functional/test_tracing.py
index b330ab6316f..5f48b233d91 100644
--- a/tests/functional/test_tracing.py
+++ b/tests/functional/test_tracing.py
@@ -73,6 +73,35 @@ def handler(event, context):
handler({}, {})
+def test_tracer_lambda_running_in_sam_cli(monkeypatch, dummy_response):
+ # GIVEN tracer runs in AWS SAM CLI (i.e. `AWS_SAM_LOCAL` is set)
+ monkeypatch.setenv("AWS_SAM_LOCAL", "true")
+ monkeypatch.setenv("LAMBDA_TASK_ROOT", "/opt/")
+ tracer = Tracer()
+
+ # WHEN a lambda function is run through the SAM CLI emulator
+ @tracer.capture_lambda_handler
+ def handler(event, context):
+ return dummy_response
+
+ # THEN tracer should run in disabled mode, and not raise an Exception
+ handler({}, {})
+
+
+def test_tracer_lambda_running_in_chalice(monkeypatch, dummy_response):
+ # GIVEN tracer runs in Chalice (i.e. `AWS_CHALICE_CLI_MODE` is set)
+ monkeypatch.setenv("AWS_CHALICE_CLI_MODE", "true")
+ tracer = Tracer()
+
+ # WHEN a lambda function is run through the Chalice CLI
+ @tracer.capture_lambda_handler
+ def handler(event, context):
+ return dummy_response
+
+ # THEN tracer should run in disabled mode, and not raise an Exception
+ handler({}, {})
+
+
def test_tracer_metadata_disabled(dummy_response):
# GIVEN tracer is disabled, and annotations/metadata are used
tracer = Tracer(disabled=True)
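For context, a minimal sketch of the behaviour these tests cover: with `AWS_SAM_LOCAL` (or `AWS_CHALICE_CLI_MODE`) present, Tracer disables itself so decorated handlers run without an X-Ray daemon. The explicit environment setup below is illustrative; the emulators set it for you.

```python
import os

from aws_lambda_powertools import Tracer

os.environ["AWS_SAM_LOCAL"] = "true"  # normally set by `sam local invoke`
tracer = Tracer(service="example")  # runs in disabled mode locally

@tracer.capture_lambda_handler
def lambda_handler(event, context):
    return {"statusCode": 200}
```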
diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py
index e146d65744f..8ea2fac7bc5 100644
--- a/tests/functional/test_utilities_batch.py
+++ b/tests/functional/test_utilities_batch.py
@@ -28,6 +28,7 @@
from aws_lambda_powertools.utilities.parser.models import (
DynamoDBStreamChangedRecordModel,
DynamoDBStreamRecordModel,
+ SqsRecordModel,
)
from aws_lambda_powertools.utilities.parser.types import Literal
from tests.functional.batch.sample_models import (
@@ -38,6 +39,32 @@
from tests.functional.utils import b64_to_str, str_to_b64
+@pytest.fixture(scope="module")
+def sqs_event_fifo_factory() -> Callable:
+ def factory(body: str, message_group_id: str = ""):
+ return {
+ "messageId": f"{uuid.uuid4()}",
+ "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a",
+ "body": body,
+ "attributes": {
+ "ApproximateReceiveCount": "1",
+ "SentTimestamp": "1703675223472",
+ "SequenceNumber": "18882884930918384133",
+ "MessageGroupId": message_group_id,
+ "SenderId": "SenderId",
+ "MessageDeduplicationId": "1eea03c3f7e782c7bdc2f2a917f40389314733ff39f5ab16219580c0109ade98",
+ "ApproximateFirstReceiveTimestamp": "1703675223484",
+ },
+ "messageAttributes": {},
+ "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
+ "eventSource": "aws:sqs",
+ "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
+ "awsRegion": "us-east-1",
+ }
+
+ return factory
+
+
@pytest.fixture(scope="module")
def sqs_event_factory() -> Callable:
def factory(body: str):
@@ -48,7 +75,7 @@ def factory(body: str):
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1545082649183",
- "SenderId": "AIDAIENQZJOLO23YVJ4VO",
+ "SenderId": "SenderId",
"ApproximateFirstReceiveTimestamp": "1545082649185",
},
"messageAttributes": {},
@@ -660,10 +687,10 @@ def lambda_handler(event, context):
assert "All records failed processing. " in str(e.value)
-def test_sqs_fifo_batch_processor_middleware_success_only(sqs_event_factory, record_handler):
+def test_sqs_fifo_batch_processor_middleware_success_only(sqs_event_fifo_factory, record_handler):
# GIVEN
- first_record = SQSRecord(sqs_event_factory("success"))
- second_record = SQSRecord(sqs_event_factory("success"))
+ first_record = SQSRecord(sqs_event_fifo_factory("success"))
+ second_record = SQSRecord(sqs_event_fifo_factory("success"))
event = {"Records": [first_record.raw_event, second_record.raw_event]}
processor = SqsFifoPartialProcessor()
@@ -679,12 +706,12 @@ def lambda_handler(event, context):
assert result["batchItemFailures"] == []
-def test_sqs_fifo_batch_processor_middleware_with_failure(sqs_event_factory, record_handler):
+def test_sqs_fifo_batch_processor_middleware_with_failure(sqs_event_fifo_factory, record_handler):
# GIVEN
- first_record = SQSRecord(sqs_event_factory("success"))
- second_record = SQSRecord(sqs_event_factory("fail"))
+ first_record = SQSRecord(sqs_event_fifo_factory("success"))
+ second_record = SQSRecord(sqs_event_fifo_factory("fail"))
# this would normally succeed, but since it's a FIFO queue, it will be marked as failure
- third_record = SQSRecord(sqs_event_factory("success"))
+ third_record = SQSRecord(sqs_event_fifo_factory("success"))
event = {"Records": [first_record.raw_event, second_record.raw_event, third_record.raw_event]}
processor = SqsFifoPartialProcessor()
@@ -702,6 +729,120 @@ def lambda_handler(event, context):
assert result["batchItemFailures"][1]["itemIdentifier"] == third_record.message_id
+def test_sqs_fifo_batch_processor_middleware_with_skip_group_on_error(sqs_event_fifo_factory, record_handler):
+ # GIVEN a batch of 5 records across 3 different MessageGroupIds
+ first_record = SQSRecord(sqs_event_fifo_factory("success", "1"))
+ second_record = SQSRecord(sqs_event_fifo_factory("success", "1"))
+ third_record = SQSRecord(sqs_event_fifo_factory("fail", "2"))
+ fourth_record = SQSRecord(sqs_event_fifo_factory("success", "2"))
+ fifth_record = SQSRecord(sqs_event_fifo_factory("fail", "3"))
+ event = {
+ "Records": [
+ first_record.raw_event,
+ second_record.raw_event,
+ third_record.raw_event,
+ fourth_record.raw_event,
+ fifth_record.raw_event,
+ ],
+ }
+
+ # WHEN the FIFO processor is set to continue processing even after encountering errors in a specific MessageGroupId
+ processor = SqsFifoPartialProcessor(skip_group_on_error=True)
+
+ @batch_processor(record_handler=record_handler, processor=processor)
+ def lambda_handler(event, context):
+ return processor.response()
+
+ # WHEN the handler is invoked
+ result = lambda_handler(event, {})
+
+ # THEN the failures should be the failing record from group 2, its skipped sibling, and the failing record from group 3
+ assert len(result["batchItemFailures"]) == 3
+ assert result["batchItemFailures"][0]["itemIdentifier"] == third_record.message_id
+ assert result["batchItemFailures"][1]["itemIdentifier"] == fourth_record.message_id
+ assert result["batchItemFailures"][2]["itemIdentifier"] == fifth_record.message_id
+
+
+def test_sqs_fifo_batch_processor_middleware_with_skip_group_on_error_first_message_fail(
+ sqs_event_fifo_factory,
+ record_handler,
+):
+ # GIVEN a batch of 5 records across 3 different MessageGroupIds
+ first_record = SQSRecord(sqs_event_fifo_factory("fail", "1"))
+ second_record = SQSRecord(sqs_event_fifo_factory("success", "1"))
+ third_record = SQSRecord(sqs_event_fifo_factory("fail", "2"))
+ fourth_record = SQSRecord(sqs_event_fifo_factory("success", "2"))
+ fifth_record = SQSRecord(sqs_event_fifo_factory("success", "3"))
+ event = {
+ "Records": [
+ first_record.raw_event,
+ second_record.raw_event,
+ third_record.raw_event,
+ fourth_record.raw_event,
+ fifth_record.raw_event,
+ ],
+ }
+
+ # WHEN the FIFO processor is set to continue processing even after encountering errors in a specific MessageGroupId
+ processor = SqsFifoPartialProcessor(skip_group_on_error=True)
+
+ @batch_processor(record_handler=record_handler, processor=processor)
+ def lambda_handler(event, context):
+ return processor.response()
+
+ # WHEN the handler is invoked
+ result = lambda_handler(event, {})
+
+ # THEN messages from groups 1 and 2 should fail, but not group 3
+ assert len(result["batchItemFailures"]) == 4
+ assert result["batchItemFailures"][0]["itemIdentifier"] == first_record.message_id
+ assert result["batchItemFailures"][1]["itemIdentifier"] == second_record.message_id
+ assert result["batchItemFailures"][2]["itemIdentifier"] == third_record.message_id
+ assert result["batchItemFailures"][3]["itemIdentifier"] == fourth_record.message_id
+
+
+def test_sqs_fifo_batch_processor_middleware_with_skip_group_on_error_and_model(sqs_event_fifo_factory, record_handler):
+ # GIVEN a batch of 5 records across 3 different MessageGroupIds
+ first_record = SQSRecord(sqs_event_fifo_factory("success", "1"))
+ second_record = SQSRecord(sqs_event_fifo_factory("success", "1"))
+ third_record = SQSRecord(sqs_event_fifo_factory("fail", "2"))
+ fourth_record = SQSRecord(sqs_event_fifo_factory("success", "2"))
+ fifth_record = SQSRecord(sqs_event_fifo_factory("fail", "3"))
+ event = {
+ "Records": [
+ first_record.raw_event,
+ second_record.raw_event,
+ third_record.raw_event,
+ fourth_record.raw_event,
+ fifth_record.raw_event,
+ ],
+ }
+
+ class OrderSqsRecord(SqsRecordModel):
+ receiptHandle: str
+
+ # WHEN the FIFO processor is set to continue processing even after encountering errors in a specific MessageGroupId
+ # AND the processor uses a Pydantic model, so the MessageGroupId attribute must remain accessible
+ processor = SqsFifoPartialProcessor(skip_group_on_error=True, model=OrderSqsRecord)
+
+ def record_handler(record: OrderSqsRecord):
+ if record.body == "fail":
+ raise ValueError("blah")
+
+ @batch_processor(record_handler=record_handler, processor=processor)
+ def lambda_handler(event, context):
+ return processor.response()
+
+ # WHEN the handler is invoked
+ result = lambda_handler(event, {})
+
+ # THEN the failures should be the failing record from group 2, its skipped sibling, and the failing record from group 3
+ assert len(result["batchItemFailures"]) == 3
+ assert result["batchItemFailures"][0]["itemIdentifier"] == third_record.message_id
+ assert result["batchItemFailures"][1]["itemIdentifier"] == fourth_record.message_id
+ assert result["batchItemFailures"][2]["itemIdentifier"] == fifth_record.message_id
+
+
def test_async_batch_processor_middleware_success_only(sqs_event_factory, async_record_handler):
# GIVEN
first_record = SQSRecord(sqs_event_factory("success"))
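For context, a minimal sketch of `skip_group_on_error` as exercised above: when a record fails, the remaining records of the same MessageGroupId are reported as failures (preserving FIFO ordering) while other groups keep being processed. The failure condition is illustrative.

```python
from aws_lambda_powertools.utilities.batch import SqsFifoPartialProcessor, batch_processor
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord

processor = SqsFifoPartialProcessor(skip_group_on_error=True)

def record_handler(record: SQSRecord):
    if record.body == "fail":  # illustrative failure condition
        raise ValueError("simulated failure")

@batch_processor(record_handler=record_handler, processor=processor)
def lambda_handler(event, context):
    return processor.response()
```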
diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py
index 5ff043f7ed3..334b3d37ea5 100644
--- a/tests/functional/test_utilities_parameters.py
+++ b/tests/functional/test_utilities_parameters.py
@@ -4,6 +4,7 @@
import json
import random
import string
+import uuid
from datetime import datetime, timedelta
from io import BytesIO
from typing import Any, Dict, List, Tuple
@@ -511,6 +512,373 @@ def test_ssm_provider_get(mock_name, mock_value, mock_version, config):
stubber.deactivate()
+def test_set_parameter(monkeypatch, mock_name, mock_value):
+ """
+ Test set_parameter()
+ """
+
+ class TestProvider(BaseProvider):
+ def set(self, name: str, value: Any, *, overwrite: bool = False, **kwargs) -> str:
+ assert name == mock_name
+ return mock_value
+
+ def _get(self, name: str, **kwargs) -> str:
+ raise NotImplementedError()
+
+ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
+ raise NotImplementedError()
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ value = parameters.set_parameter(name=mock_name, value=mock_value)
+
+ assert value == mock_value
+
+
+def test_ssm_provider_set_parameter(mock_name, mock_value, mock_version, config):
+ """
+ Test SSMProvider.set() with default options
+ """
+ # GIVEN a SSMProvider instance with default values
+ provider = parameters.SSMProvider(config=config)
+
+ # WHEN setting a parameter
+ stubber = stub.Stubber(provider.client)
+ response = {"Version": mock_version, "Tier": "Standard"}
+ expected_params = {
+ "Name": mock_name,
+ "Value": mock_value,
+ "Type": "String",
+ "Overwrite": False,
+ "Description": "",
+ "Tier": "Standard",
+ }
+ stubber.add_response("put_parameter", response, expected_params)
+ stubber.activate()
+
+ # THEN it should return the stubbed response
+ try:
+ assert provider.set(name=mock_name, value=mock_value) == response
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_set_parameter_default_config(monkeypatch, mock_name, mock_value, mock_version):
+ """
+ Test SSMProvider.set() without specifying the config
+ """
+ monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-2")
+
+ # GIVEN a SSMProvider instance with default values
+ provider = parameters.SSMProvider()
+
+ # WHEN setting a parameter
+ stubber = stub.Stubber(provider.client)
+ response = {"Version": mock_version, "Tier": "Advanced"}
+ expected_params = {
+ "Name": mock_name,
+ "Value": mock_value,
+ "Type": "String",
+ "Overwrite": False,
+ "Tier": "Standard",
+ "Description": "",
+ }
+ stubber.add_response("put_parameter", response, expected_params)
+ stubber.activate()
+
+ # THEN it should return the stubbed response
+ try:
+ assert provider.set(name=mock_name, value=mock_value) == response
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_set_parameter_with_custom_options(monkeypatch, mock_name, mock_value, mock_version):
+ """
+ Test SSMProvider.set() with custom options
+ """
+
+ monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-2")
+
+ # GIVEN a SSMProvider instance
+ provider = parameters.SSMProvider()
+
+ # WHEN using custom parameters
+ stubber = stub.Stubber(provider.client)
+ response = {"Version": mock_version, "Tier": "Advanced"}
+ expected_params = {
+ "Name": mock_name,
+ "Value": mock_value,
+ "Type": "SecureString",
+ "Overwrite": True,
+ "Tier": "Advanced",
+ "Description": "Parameter",
+ "KeyId": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
+ }
+ stubber.add_response("put_parameter", response, expected_params)
+ stubber.activate()
+
+ # THEN it should return the stubbed response
+ try:
+ version = provider.set(
+ name=mock_name,
+ value=mock_value,
+ tier="Advanced",
+ parameter_type="SecureString",
+ overwrite=True,
+ description="Parameter",
+ kms_key_id="arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
+ )
+
+ assert version == response
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_set_parameter_raise_on_failure(mock_name, mock_value, mock_version, config):
+ """
+ Test SSMProvider.set_parameter() with failure
+ """
+ # GIVEN a SSMProvider instance
+ provider = parameters.SSMProvider(config=config)
+
+ # Stub the boto3 client
+ stubber = stub.Stubber(provider.client)
+ response = {"Version": mock_version, "Tier": "Standard"}
+ expected_params = {
+ "Name": mock_name,
+ "Value": mock_value,
+ "Type": "String",
+ "Overwrite": False,
+ "Description": "",
+ "Tier": "NoTier",
+ }
+ stubber.add_response("put_parameter", response, expected_params)
+ stubber.activate()
+
+ # WHEN setting the parameter fails (the stub expects a mismatching tier=NoTier)
+ # THEN a SetParameterError should be raised
+ with pytest.raises(parameters.exceptions.SetParameterError, match="Error setting parameter*"):
+ try:
+ provider.set(name=mock_name, value=mock_value)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_set_secret(monkeypatch, mock_name, mock_value):
+ """
+ Test set_secret()
+ """
+
+ # GIVEN a mock implementation of BaseProvider set method
+ class TestProvider(BaseProvider):
+ def set(self, name: str, value: Any, *, overwrite: bool = False, **kwargs) -> str:
+ assert name == mock_name
+ return mock_value
+
+ def _get(self, name: str, **kwargs) -> str:
+ raise NotImplementedError()
+
+ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
+ raise NotImplementedError()
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "secrets", TestProvider())
+
+ # WHEN set_secret function is called
+ value = parameters.set_secret(name=mock_name, value=mock_value)
+
+ # THEN it should return the mock_value
+ assert value == mock_value
+
+
+def test_secret_provider_update_secret_with_plain_text_value(mock_name, mock_value, config):
+ """
+ Test SecretsProvider.set() with a plain text value
+ """
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ client_request_token = str(uuid.uuid4())
+
+ # WHEN setting a secret with a plain text value
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "SecretId": mock_name,
+ "SecretString": mock_value,
+ "ClientRequestToken": client_request_token,
+ }
+ stubber.add_response("put_secret_value", response, expected_params)
+ stubber.activate()
+
+ # THEN it should call put_secret_value with the plain text value and the client request token
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value, client_request_token=client_request_token)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_secret_provider_update_secret_with_binary_value(mock_name, config):
+ """
+ Test SecretsProvider.set() with a binary value
+ """
+
+ mock_value = b"value_to_test"
+
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ # WHEN setting a secret with a binary value
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "SecretId": mock_name,
+ "SecretBinary": mock_value,
+ }
+ stubber.add_response("put_secret_value", response, expected_params)
+ stubber.activate()
+
+ # THEN it should call put_secret_value with the binary value
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_secret_provider_update_secret_with_dict_value(mock_name, config):
+ """
+ Test SecretsProvider.set() with a dict value
+ """
+
+ mock_value = {"key": "powertools"}
+
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ # WHEN setting a secret with a dictionary value
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "SecretId": mock_name,
+ "SecretString": json.dumps(mock_value),
+ }
+ stubber.add_response("put_secret_value", response, expected_params)
+ stubber.activate()
+
+ # THEN it should encode the dictionary as JSON and call put_secret_value with the encoded value
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_secret_provider_update_secret_with_raise_on_failure(mock_name, mock_value, config):
+ """
+ Test SecretsProvider.set() with raise on failure
+ """
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ # Stub the boto3 client
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "SecretName": mock_name,
+ "SecretString": mock_value,
+ }
+ stubber.add_response("put_secret_value", response, expected_params)
+ stubber.activate()
+
+ # WHEN updating the secret fails (the stub expects a mismatching parameter)
+ # THEN a SetSecretError should be raised
+ with pytest.raises(parameters.exceptions.SetSecretError, match="Error setting secret*"):
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_secret_provider_create_secret(mocker, mock_name, mock_value, config):
+ """
+ Test SecretsProvider.set() forcing a new secret creation
+ """
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ # WHEN the put_secret_value method raises a ResourceNotFoundException
+ mock_update_secret = mocker.patch.object(provider, "_update_secret")
+ mock_update_secret.side_effect = provider.client.exceptions.ResourceNotFoundException(
+ {"Error": {"Code": "ResourceNotFoundException"}},
+ "put_secret_value",
+ )
+
+ # WHEN setting values for a new secret
+ client_request_token = str(uuid.uuid4())
+ # Stub the boto3 client
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "Name": mock_name,
+ "SecretString": mock_value,
+ "ClientRequestToken": client_request_token,
+ }
+
+ # THEN it should call create_secret
+ stubber.add_response("create_secret", response, expected_params)
+ stubber.activate()
+
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value, client_request_token=client_request_token)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_secret_provider_create_secret_raise_on_error(mocker, mock_name, mock_value, config):
+ """
+ Test SecretsProvider.set() raising an error while forcing a new secret creation
+ """
+ # GIVEN a SecretsProvider instance
+ provider = parameters.SecretsProvider(config=config)
+
+ # WHEN the put_secret_value method raises a ResourceNotFoundException
+ mock_update_secret = mocker.patch.object(provider, "_update_secret")
+ mock_update_secret.side_effect = provider.client.exceptions.ResourceNotFoundException(
+ {"Error": {"Code": "ResourceNotFoundException"}},
+ "put_secret_value",
+ )
+
+ # WHEN setting values for a new secret with wrong parameters
+ client_request_token = str(uuid.uuid4())
+ # Stub the boto3 client
+ stubber = stub.Stubber(provider.client)
+ response = {"Name": mock_name, "ARN": f"arn:aws:secretsmanager:us-east-1:132456789012:secret/{mock_name}"}
+ expected_params = {
+ "NameSecret": mock_name,
+ "SecretString": mock_value,
+ "ClientRequestToken": client_request_token,
+ }
+ stubber.add_response("create_secret", response, expected_params)
+ stubber.activate()
+
+ # WHEN creating the secret fails (the stub expects mismatching parameters)
+ # THEN a SetSecretError should be raised
+ with pytest.raises(parameters.exceptions.SetSecretError, match="Error setting secret*"):
+ try:
+ assert response == provider.set(name=mock_name, value=mock_value)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
def test_ssm_provider_get_with_custom_client(mock_name, mock_value, mock_version, config):
"""
Test SSMProvider.get() with a non-cached value
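For context, a minimal sketch of the new write helpers these tests cover; parameter and secret names are illustrative. `set_parameter()` wraps SSM `put_parameter`, and `set_secret()` wraps Secrets Manager `put_secret_value`, falling back to `create_secret` when the secret does not exist yet.

```python
from aws_lambda_powertools.utilities import parameters

def lambda_handler(event, context):
    # Create or overwrite an SSM parameter
    parameters.set_parameter(name="/app/config/url", value="https://example.com", overwrite=True)

    # Create or update a secret; dict values are JSON-encoded automatically
    parameters.set_secret(name="app/db-credentials", value={"user": "admin", "password": "s3cret"})
```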
diff --git a/tests/unit/data_classes/test_cloud_watch_alarm_event.py b/tests/unit/data_classes/test_cloud_watch_alarm_event.py
new file mode 100644
index 00000000000..56933a1505d
--- /dev/null
+++ b/tests/unit/data_classes/test_cloud_watch_alarm_event.py
@@ -0,0 +1,104 @@
+import json
+from typing import Dict, List
+
+from aws_lambda_powertools.utilities.data_classes import CloudWatchAlarmEvent
+from tests.functional.utils import load_event
+
+
+def test_cloud_watch_alarm_event_single_metric():
+ raw_event = load_event("cloudWatchAlarmEventSingleMetric.json")
+ parsed_event = CloudWatchAlarmEvent(raw_event)
+
+ assert parsed_event.source == raw_event["source"]
+ assert parsed_event.region == raw_event["region"]
+ assert parsed_event.alarm_arn == raw_event["alarmArn"]
+ assert parsed_event.alarm_data.alarm_name == raw_event["alarmData"]["alarmName"]
+
+ assert parsed_event.alarm_data.state.value == raw_event["alarmData"]["state"]["value"]
+ assert parsed_event.alarm_data.state.reason == raw_event["alarmData"]["state"]["reason"]
+ assert parsed_event.alarm_data.state.reason_data == raw_event["alarmData"]["state"]["reasonData"]
+ assert parsed_event.alarm_data.state.reason_data_decoded == json.loads(
+ raw_event["alarmData"]["state"]["reasonData"],
+ )
+ assert parsed_event.alarm_data.state.timestamp == raw_event["alarmData"]["state"]["timestamp"]
+
+ assert parsed_event.alarm_data.previous_state.value == raw_event["alarmData"]["previousState"]["value"]
+ assert parsed_event.alarm_data.previous_state.reason == raw_event["alarmData"]["previousState"]["reason"]
+ assert parsed_event.alarm_data.previous_state.reason_data == raw_event["alarmData"]["previousState"]["reasonData"]
+ assert parsed_event.alarm_data.previous_state.reason_data_decoded == json.loads(
+ raw_event["alarmData"]["previousState"]["reasonData"],
+ )
+ assert parsed_event.alarm_data.previous_state.timestamp == raw_event["alarmData"]["previousState"]["timestamp"]
+
+ assert parsed_event.alarm_data.configuration.description == raw_event["alarmData"]["configuration"]["description"]
+ assert parsed_event.alarm_data.configuration.alarm_rule is None
+ assert parsed_event.alarm_data.configuration.alarm_actions_suppressor is None
+ assert parsed_event.alarm_data.configuration.alarm_actions_suppressor_extension_period is None
+ assert parsed_event.alarm_data.configuration.alarm_actions_suppressor_wait_period is None
+
+ assert isinstance(parsed_event.alarm_data.configuration.metrics, List)
+ # metric position 0
+ metric_0 = parsed_event.alarm_data.configuration.metrics[0]
+ raw_metric_0 = raw_event["alarmData"]["configuration"]["metrics"][0]
+ assert metric_0.metric_id == raw_metric_0["id"]
+ assert metric_0.expression == raw_metric_0["expression"]
+ assert metric_0.label == raw_metric_0["label"]
+ assert metric_0.return_data == raw_metric_0["returnData"]
+
+ # metric position 1
+ metric_1 = parsed_event.alarm_data.configuration.metrics[1]
+ raw_metric_1 = raw_event["alarmData"]["configuration"]["metrics"][1]
+ assert metric_1.metric_id == raw_metric_1["id"]
+ assert metric_1.return_data == raw_metric_1["returnData"]
+ assert metric_1.metric_stat.stat == raw_metric_1["metricStat"]["stat"]
+ assert metric_1.metric_stat.period == raw_metric_1["metricStat"]["period"]
+ assert metric_1.metric_stat.unit is None
+ assert isinstance(metric_1.metric_stat.metric, Dict)
+
+
+def test_cloud_watch_alarm_event_composite_metric():
+ raw_event = load_event("cloudWatchAlarmEventCompositeMetric.json")
+ parsed_event = CloudWatchAlarmEvent(raw_event)
+
+ assert parsed_event.source == raw_event["source"]
+ assert parsed_event.region == raw_event["region"]
+ assert parsed_event.alarm_arn == raw_event["alarmArn"]
+ assert parsed_event.alarm_data.alarm_name == raw_event["alarmData"]["alarmName"]
+
+ assert parsed_event.alarm_data.state.value == raw_event["alarmData"]["state"]["value"]
+ assert parsed_event.alarm_data.state.reason == raw_event["alarmData"]["state"]["reason"]
+ assert parsed_event.alarm_data.state.reason_data == raw_event["alarmData"]["state"]["reasonData"]
+ assert parsed_event.alarm_data.state.reason_data_decoded == json.loads(
+ raw_event["alarmData"]["state"]["reasonData"],
+ )
+ assert parsed_event.alarm_data.state.timestamp == raw_event["alarmData"]["state"]["timestamp"]
+
+ assert parsed_event.alarm_data.previous_state.value == raw_event["alarmData"]["previousState"]["value"]
+ assert parsed_event.alarm_data.previous_state.reason == raw_event["alarmData"]["previousState"]["reason"]
+ assert parsed_event.alarm_data.previous_state.reason_data == raw_event["alarmData"]["previousState"]["reasonData"]
+ assert parsed_event.alarm_data.previous_state.reason_data_decoded == json.loads(
+ raw_event["alarmData"]["previousState"]["reasonData"],
+ )
+ assert parsed_event.alarm_data.previous_state.timestamp == raw_event["alarmData"]["previousState"]["timestamp"]
+ assert (
+ parsed_event.alarm_data.previous_state.actions_suppressed_by
+ == raw_event["alarmData"]["previousState"]["actionsSuppressedBy"]
+ )
+ assert (
+ parsed_event.alarm_data.previous_state.actions_suppressed_reason
+ == raw_event["alarmData"]["previousState"]["actionsSuppressedReason"]
+ )
+
+ assert parsed_event.alarm_data.configuration.alarm_rule == raw_event["alarmData"]["configuration"]["alarmRule"]
+ assert (
+ parsed_event.alarm_data.configuration.alarm_actions_suppressor_wait_period
+ == raw_event["alarmData"]["configuration"]["actionsSuppressorWaitPeriod"]
+ )
+ assert (
+ parsed_event.alarm_data.configuration.alarm_actions_suppressor_extension_period
+ == raw_event["alarmData"]["configuration"]["actionsSuppressorExtensionPeriod"]
+ )
+ assert (
+ parsed_event.alarm_data.configuration.alarm_actions_suppressor
+ == raw_event["alarmData"]["configuration"]["actionsSuppressor"]
+ )
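For context, a minimal sketch of the new `CloudWatchAlarmEvent` data class validated above; the handler body is illustrative.

```python
from aws_lambda_powertools.utilities.data_classes import CloudWatchAlarmEvent, event_source

@event_source(data_class=CloudWatchAlarmEvent)
def lambda_handler(event: CloudWatchAlarmEvent, context):
    # reason_data_decoded parses the JSON string carried in reasonData
    if event.alarm_data.state.value == "ALARM":
        print(event.alarm_data.alarm_name, event.alarm_data.state.reason_data_decoded)
```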
diff --git a/tests/unit/test_data_classes.py b/tests/unit/test_data_classes.py
index c8f0c1fc932..393bcdf250e 100644
--- a/tests/unit/test_data_classes.py
+++ b/tests/unit/test_data_classes.py
@@ -324,13 +324,6 @@ def test_base_proxy_event_get_header_value_case_insensitive():
assert value is None
-def test_base_proxy_event_json_body_key_error():
- event = BaseProxyEvent({})
- with pytest.raises(KeyError) as ke:
- assert not event.json_body
- assert str(ke.value) == "'body'"
-
-
def test_base_proxy_event_json_body():
data = {"message": "Foo"}
event = BaseProxyEvent({"body": json.dumps(data)})
@@ -338,13 +331,6 @@ def test_base_proxy_event_json_body():
assert event.json_body["message"] == "Foo"
-def test_base_proxy_event_decode_body_key_error():
- event = BaseProxyEvent({})
- with pytest.raises(KeyError) as ke:
- assert not event.decoded_body
- assert str(ke.value) == "'body'"
-
-
def test_base_proxy_event_decode_body_encoded_false():
data = "Foo"
event = BaseProxyEvent({"body": data, "isBase64Encoded": False})
diff --git a/tests/unit/test_shared_functions.py b/tests/unit/test_shared_functions.py
index c8c4bb2afb2..b286c536249 100644
--- a/tests/unit/test_shared_functions.py
+++ b/tests/unit/test_shared_functions.py
@@ -15,6 +15,7 @@
resolve_env_var_choice,
resolve_max_age,
resolve_truthy_env_var_choice,
+ sanitize_xray_segment_name,
strtobool,
)
from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
@@ -175,3 +176,27 @@ def test_abs_lambda_path_w_filename_envvar(default_lambda_path):
os.environ["LAMBDA_TASK_ROOT"] = default_lambda_path
# Then path = env + relative_path
assert abs_lambda_path(relative_path="cert/pub.cert") == str(Path(os.environ["LAMBDA_TASK_ROOT"], relative_path))
+
+
+def test_sanitize_xray_segment_name():
+ # GIVEN a name with invalid characters
+ invalid_name = "app?;*.lambda_function.().get_todos!$~^<>"
+
+ # WHEN we sanitize this name by removing invalid characters
+ sanitized_name = sanitize_xray_segment_name(invalid_name)
+
+ # THEN the sanitized name should not contain invalid characters
+ expected_name = "app.lambda_function.locals.get_todos"
+ assert sanitized_name == expected_name
+
+
+def test_sanitize_xray_segment_name_with_no_special_characters():
+ # GIVEN a name without any invalid characters
+ valid_name = "app#lambda_function"
+
+ # WHEN we sanitize this name
+ sanitized_name = sanitize_xray_segment_name(valid_name)
+
+ # THEN the sanitized name remains the same as the original name
+ expected_name = valid_name
+ assert sanitized_name == expected_name
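
These two tests fully determine the sanitizer's observable behavior: strip the characters X-Ray rejects in segment names (`? ; * ( ) ! $ ~ ^ < >`) and keep everything else, including `#`. One regex-based implementation that satisfies both tests (a sketch, not necessarily the library's exact code):

```python
import re

# A sketch that passes both tests above; the library's actual implementation
# may differ. X-Ray documents these characters as invalid in segment names,
# so they are removed outright.
def sanitize_xray_segment_name(name: str) -> str:
    return re.sub(r"[?;*()!$~^<>]", "", name)


assert (
    sanitize_xray_segment_name("app?;*.lambda_function.(<locals>).get_todos!$~^<>")
    == "app.lambda_function.locals.get_todos"
)
assert sanitize_xray_segment_name("app#lambda_function") == "app#lambda_function"
```

An allowlist over X-Ray's documented valid characters (letters, digits, whitespace, plus `_ . : / % & # = + \ - @`) would pass the same two tests, so either shape is plausible.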
diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py
index 7b09bcde885..0d12afa629b 100644
--- a/tests/unit/test_tracing.py
+++ b/tests/unit/test_tracing.py
@@ -127,10 +127,10 @@ def greeting(name, message):
# and use service name as a metadata namespace
assert in_subsegment_mock.in_subsegment.call_count == 1
assert in_subsegment_mock.in_subsegment.call_args == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_method..greeting",
+ name=f"## {MODULE_PREFIX}.test_tracer_method.locals.greeting",
)
assert in_subsegment_mock.put_metadata.call_args == mocker.call(
- key=f"{MODULE_PREFIX}.test_tracer_method..greeting response",
+ key=f"{MODULE_PREFIX}.test_tracer_method.locals.greeting response",
value=dummy_response,
namespace="booking",
)
@@ -261,8 +261,7 @@ def greeting(name, message):
# and their service name as the namespace
put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1]
assert (
- put_metadata_mock_args["key"]
- == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata..greeting error"
+ put_metadata_mock_args["key"] == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata.locals.greeting error"
)
assert put_metadata_mock_args["namespace"] == "booking"
@@ -316,20 +315,20 @@ async def greeting(name, message):
# THEN we should add metadata for each response like we would for a sync decorated method
assert in_subsegment_mock.in_subsegment.call_count == 2
assert in_subsegment_greeting_call_args == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_method_nested_async..greeting",
+ name=f"## {MODULE_PREFIX}.test_tracer_method_nested_async.locals.greeting",
)
assert in_subsegment_greeting2_call_args == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_method_nested_async..greeting_2",
+ name=f"## {MODULE_PREFIX}.test_tracer_method_nested_async.locals.greeting_2",
)
assert in_subsegment_mock.put_metadata.call_count == 2
assert put_metadata_greeting2_call_args == mocker.call(
- key=f"{MODULE_PREFIX}.test_tracer_method_nested_async..greeting_2 response",
+ key=f"{MODULE_PREFIX}.test_tracer_method_nested_async.locals.greeting_2 response",
value=dummy_response,
namespace="booking",
)
assert put_metadata_greeting_call_args == mocker.call(
- key=f"{MODULE_PREFIX}.test_tracer_method_nested_async..greeting response",
+ key=f"{MODULE_PREFIX}.test_tracer_method_nested_async.locals.greeting response",
value=dummy_response,
namespace="booking",
)
@@ -375,7 +374,7 @@ async def greeting(name, message):
put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1]
assert (
put_metadata_mock_args["key"]
- == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata_async..greeting error"
+ == f"{MODULE_PREFIX}.test_tracer_method_exception_metadata_async.locals.greeting error"
)
assert put_metadata_mock_args["namespace"] == "booking"
@@ -409,7 +408,7 @@ def handler(event, context):
assert in_subsegment_mock.in_subsegment.call_count == 2
assert handler_trace == mocker.call(name="## handler")
assert yield_function_trace == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_yield_from_context_manager..yield_with_capture",
+ name=f"## {MODULE_PREFIX}.test_tracer_yield_from_context_manager.locals.yield_with_capture",
)
assert "test result" in result
@@ -436,7 +435,7 @@ def yield_with_capture():
put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1]
assert (
put_metadata_mock_args["key"]
- == f"{MODULE_PREFIX}.test_tracer_yield_from_context_manager_exception_metadata..yield_with_capture error" # noqa E501
+ == f"{MODULE_PREFIX}.test_tracer_yield_from_context_manager_exception_metadata.locals.yield_with_capture error" # noqa E501
)
assert isinstance(put_metadata_mock_args["value"], ValueError)
assert put_metadata_mock_args["namespace"] == "booking"
@@ -480,7 +479,7 @@ def handler(event, context):
assert in_subsegment_mock.in_subsegment.call_count == 2
assert handler_trace == mocker.call(name="## handler")
assert yield_function_trace == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_yield_from_nested_context_manager..yield_with_capture",
+ name=f"## {MODULE_PREFIX}.test_tracer_yield_from_nested_context_manager.locals.yield_with_capture",
)
assert "test result" in result
@@ -512,7 +511,7 @@ def handler(event, context):
assert in_subsegment_mock.in_subsegment.call_count == 2
assert handler_trace == mocker.call(name="## handler")
assert generator_fn_trace == mocker.call(
- name=f"## {MODULE_PREFIX}.test_tracer_yield_from_generator..generator_fn",
+ name=f"## {MODULE_PREFIX}.test_tracer_yield_from_generator.locals.generator_fn",
)
assert "test result" in result
@@ -538,7 +537,7 @@ def generator_fn():
put_metadata_mock_args = in_subsegment_mock.put_metadata.call_args[1]
assert (
put_metadata_mock_args["key"]
- == f"{MODULE_PREFIX}.test_tracer_yield_from_generator_exception_metadata..generator_fn error"
+ == f"{MODULE_PREFIX}.test_tracer_yield_from_generator_exception_metadata.locals.generator_fn error"
)
assert put_metadata_mock_args["namespace"] == "booking"
assert isinstance(put_metadata_mock_args["value"], ValueError)
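
The expectation changes across `test_tracing.py` all follow from that sanitizer: a nested function's `__qualname__` contains `<locals>`, which X-Ray rejects in segment names, so stripping the angle brackets turns `.<locals>.` into `.locals.`. A small self-contained illustration, reusing the sketch from above:

```python
import re

def sanitize_xray_segment_name(name: str) -> str:
    # same sketch as above: drop the characters X-Ray rejects
    return re.sub(r"[?;*()!$~^<>]", "", name)

def outer():
    def greeting():
        ...
    return greeting

qualname = outer().__qualname__  # 'outer.<locals>.greeting'
assert f"## {sanitize_xray_segment_name(qualname)}" == "## outer.locals.greeting"
```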