diff --git a/.chglog/CHANGELOG.tpl.md b/.chglog/CHANGELOG.tpl.md new file mode 100755 index 00000000000..c2b5a55cdb4 --- /dev/null +++ b/.chglog/CHANGELOG.tpl.md @@ -0,0 +1,56 @@ +{{ if .Versions -}} + +## [Unreleased] + +{{ if .Unreleased.CommitGroups -}} +{{ range .Unreleased.CommitGroups -}} +### {{ .Title }} +{{ range .Commits -}} +- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} +{{ end }} +{{ end -}} +{{ end -}} +{{ end -}} + +{{ range .Versions }} + +## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }} +{{ range .CommitGroups -}} +### {{ .Title }} +{{ range .Commits -}} +- {{ if .Scope }}**{{ upperFirst .Scope }}:** {{ end }}{{ .Subject }} +{{ end }} +{{ end -}} + +{{- if .RevertCommits -}} +### Reverts +{{ range .RevertCommits -}} +- {{ .Revert.Header }} +{{ end }} +{{ end -}} + +{{- if .MergeCommits -}} +### Pull Requests +{{ range .MergeCommits -}} +- {{ .Header }} +{{ end }} +{{ end -}} + +{{- if .NoteGroups -}} +{{ range .NoteGroups -}} +### {{ .Title }} +{{ range .Notes }} +{{ .Body }} +{{ end }} +{{ end -}} +{{ end -}} +{{ end -}} + +{{- if .Versions }} +[Unreleased]: {{ .Info.RepositoryURL }}/compare/{{ $latest := index .Versions 0 }}{{ $latest.Tag.Name }}...HEAD +{{ range .Versions -}} +{{ if .Tag.Previous -}} +[{{ .Tag.Name }}]: {{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }} +{{ end -}} +{{ end -}} +{{ end -}} diff --git a/.chglog/config.yml b/.chglog/config.yml new file mode 100755 index 00000000000..294b3289981 --- /dev/null +++ b/.chglog/config.yml @@ -0,0 +1,32 @@ +style: github +template: CHANGELOG.tpl.md +info: + title: CHANGELOG + repository_url: https://github.com/awslabs/aws-lambda-powertools-python +options: + commits: + filters: + Type: + - feat + - fix + - perf + - refactor + - docs + - chore + commit_groups: + title_maps: + feat: Features + fix: Bug Fixes + perf: Performance Improvements + refactor: Code Refactoring + docs: Documentation + chore: Project maintenance + header: + pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$" + pattern_maps: + - Type + - Scope + - Subject + notes: + keywords: + - BREAKING CHANGE diff --git a/.github/auto-label.json b/.github/auto-label.json deleted file mode 100644 index 1218790bdc4..00000000000 --- a/.github/auto-label.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "rules": { - "area/utilities": ["aws_lambda_powertools_python/utilities/", "aws_lambda_powertools_python/utilities/", "aws_lambda_powertools_python/middleware_factory/"], - "area/logger": ["aws_lambda_powertools_python/logging/"], - "area/tracer": ["aws_lambda_powertools_python/tracing/"], - "area/metrics": ["aws_lambda_powertools_python/metrics/"], - "documentation": ["docs/", "mkdocs.yml"], - "internal": ["Makefile", "CHANGELOG.md", "CONTRIBUTING.md"] - } -} diff --git a/.github/auto-label_yml.inactive b/.github/auto-label_yml.inactive deleted file mode 100644 index 220587fab76..00000000000 --- a/.github/auto-label_yml.inactive +++ /dev/null @@ -1,36 +0,0 @@ -# NOTE to self: Reason it doesn't work it's due to Org restrictions and how GitHub exposes token to forks - -name: PR Auto Label -on: [pull_request] -#on: -# pull_request: -# types: [opened, synchronize] -# types: [pull_request] - -jobs: -# auto-label: -# name: PR Auto Label -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v2 -# - uses: banyan/auto-label@1.2 -# env: -# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - size-label: - name: PR diff size label - runs-on: ubuntu-latest - 
steps: - - name: size-label - uses: codelytv/pr-size-labeler@v1 - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - xs_max_size: '10' - s_max_size: '100' - m_max_size: '500' - l_max_size: '1000' - fail_if_xl: 'false' - message_if_xl: > - 'This PR exceeds the recommended size of 1000 lines. - Please make sure you are NOT addressing multiple issues with one PR. - Note this PR might be rejected due to its size.’ - github_api_url: 'api.github.com' diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml new file mode 100644 index 00000000000..85dc7e0579d --- /dev/null +++ b/.github/boring-cyborg.yml @@ -0,0 +1,117 @@ +##### Labeler ########################################################################################################## +labelPRBasedOnFilePath: + area/utilities: + - aws_lambda_powertools/utilities/* + - aws_lambda_powertools/utilities/**/* + - aws_lambda_powertools/middleware_factory/* + - aws_lambda_powertools/middleware_factory/**/* + area/logger: + - aws_lambda_powertools/logging/* + - aws_lambda_powertools/logging/**/* + - aws_lambda_powertools/package_logger.py + area/tracer: + - aws_lambda_powertools/tracing/* + - aws_lambda_powertools/tracing/**/* + area/metrics: + - aws_lambda_powertools/metrics/* + - aws_lambda_powertools/metrics/**/* + area/event_handlers: + - aws_lambda_powertools/event_handler/* + - aws_lambda_powertools/event_handler/**/* + + documentation: + - docs/* + - docs/**/* + - mkdocs.yml + + internal: + - .github/* + - .github/**/* + - .chglog/* + - .flake8 + - .gitignore + - .pre-commit-config.yaml + - Makefile + - CONTRIBUTING.md + - CODE_OF_CONDUCT.md + - LICENSE + - aws_lambda_powertools_python/shared/* + - aws_lambda_powertools_python/shared/** + + dependencies: + - pyproject.toml + - poetry.lock + + tests: + - tests/* + - tests/**/* + - benchmark/* + - benchmark/**/* + +##### Greetings ######################################################################################################## +firstPRWelcomeComment: > + Thanks a lot for your first contribution! Please check out our contributing guidelines and don't hesitate to ask whatever you need. + +# Comment to be posted to congratulate user on their first merged PR +firstPRMergeComment: > + Awesome work, congrats on your first merged pull request and thank you for helping improve everyone's experience! + +# Comment to be posted to on first time issues +firstIssueWelcomeComment: > + Thanks for opening your first issue here! We'll come back to you as soon as we can. + + +###### IssueLink Adder ################################################################################################# +# Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. +#insertIssueLinkInPrDescription: +# # specify the placeholder for the issue link that should be present in the description +# descriptionIssuePlaceholderRegexp: "^Issue link: (.*)$" +# matchers: +# # you can have several matches - for different types of issues +# # only the first matching entry is replaced +# jiraIssueMatch: +# # specify the regexp of issue id that you can find in the title of the PR +# # the match groups can be used to build the issue id (${1}, ${2}, etc.). +# titleIssueIdRegexp: \[(AIRFLOW-[0-9]{4})\] +# # the issue link to be added. ${1}, ${2} ... 
are replaced with the match groups from the +# # title match (remember to use quotes) +# descriptionIssueLink: "[${1}](https://issues.apache.org/jira/browse/${1}/)" +# docOnlyIssueMatch: +# titleIssueIdRegexp: \[(AIRFLOW-X{4})\] +# descriptionIssueLink: "`Document only change, no JIRA issue`" + +###### Title Validator ################################################################################################# +# Verifies if commit/PR titles match the regexp specified +#verifyTitles: +# # Regular expression that should be matched by titles of commits or PR +# titleRegexp: ^\[AIRFLOW-[0-9]{4}\].*$|^\[AIRFLOW-XXXX\].*$ +# # If set to true, it will always check the PR title (as opposed to the individual commits). +# alwaysUsePrTitle: true +# # If set to true, it will only check the commit in case there is a single commit. +# # In case of multiple commits it will check PR title. +# # This reflects the standard behaviour of Github that for `Squash & Merge` GitHub +# # uses the PR title rather than commit messages for the squashed commit ¯\_(ツ)_/¯ +# # For single-commit PRs it takes the squashed commit message from the commit as expected. +# # +# # If set to false it will check all commit messages. This is useful when you do not squash commits at merge. +# validateEitherPrOrSingleCommitTitle: true +# # The title the GitHub status should appear from. +# statusTitle: "Title Validator" +# # A custom message to be displayed when the title passes validation. +# successMessage: "Validation successful!" +# # A custom message to be displayed when the title fails validation. +# # Allows insertion of ${type} (commit/PR), ${title} (the title validated) and ${regex} (the titleRegexp above). +# failureMessage: "Wrong ${type} title: ${title}" + +###### PR/Branch Up-To-Date Checker #################################################################################### +# Check if the branch is up to date with develop when certain files are modified +#checkUpToDate: +# # The default branch is "develop", change the branch if you want to check against a different target branch +# targetBranch: develop +# files: +# # File paths that you want to check for +# # In this example, it checks if the branch is up to date when alembic migrations are modified in the PR. +# # It helps avoid multiple heads in alembic migrations in a collaborative development project. 
+# - airflow/migrations/* +# - airflow/migrations/**/* +# - airflow/alembic.ini diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..e95edbee4e6 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,33 @@ +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + commit-message: + prefix: chore + include: scope + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + target-branch: "develop" + commit-message: + prefix: chore + include: scope + +# - package-ecosystem: "pip" +# directory: "/" +# schedule: +# interval: "daily" +# target-branch: "develop" +# update_types: +# - "semver:major" +# labels: +# - "do-not-merge" +# - "dependencies" +# commit-message: +# prefix: chore +# include: scope diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 00000000000..a98b7270199 --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,28 @@ +pull_request_rules: + - name: automatic merge for Dependabot pull requests + conditions: + - author~=^dependabot(|-preview)\[bot\]$ +# - check-success=build # matrix jobs aren't working in mergify + - -label~="do-not-merge" + - "#approved-reviews-by>=1" # until we exclude major versions in dependabot + actions: + merge: + strict: false + method: squash + commit_message: title+body + + - name: Automatic merge ⬇️ on approval ✔ + conditions: + - base!=master + - "#approved-reviews-by>=1" + - "#changes-requested-reviews-by=0" + - -title~=(WIP|wip) +# - check-success=build # matrix jobs aren't working in mergify + - check-success=Semantic Pull Request + - body~=(?m)^\[X\] Meet tenets criteria + actions: + merge: + strict: smart + method: squash + strict_method: merge + commit_message: title+body diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml index e2e0a63d205..44ad5a61779 100644 --- a/.github/release-drafter.yml +++ b/.github/release-drafter.yml @@ -20,9 +20,10 @@ categories: - title: '🚒 Deprecations' labels: - 'deprecated' - - title: '🔧 Internal' + - title: '🔧 Maintenance' labels: - 'internal' + - 'dependencies' exclude-labels: - 'skip-changelog' tag-template: 'v$NEXT_PATCH_VERSION' @@ -32,7 +33,7 @@ template: | **[Human readable summary of changes]** ## Changes - + $CHANGES ## This release was made possible by the following contributors: diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 246992ec244..19794cad093 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,7 +4,7 @@ name: Publish to PyPi # # === Manual activities === # -# 1. Document human readable changes in CHANGELOG +# 1. Document human readable changes in CHANGELOG (pre-generate unreleased changes with `make changelog`) # 2. Bump package version using poetry version # 3. Merge version changes to develop branch # 4. 
Edit the current draft release notes @@ -35,7 +35,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: "3.8" - name: Set release notes tag diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index ddef9f6c527..0990d6d0152 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -1,4 +1,4 @@ -name: Build +name: Code quality on: pull_request: @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -43,4 +43,5 @@ jobs: # flags: unittests env_vars: OS,PYTHON name: aws-lambda-powertools-python-codecov - fail_ci_if_error: true + # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index dcdd409b835..dceee36b2f9 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -13,7 +13,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: "3.8" - name: Install dependencies diff --git a/.gitignore b/.gitignore index ace70c8192d..ce5e32bd3e1 100644 --- a/.gitignore +++ b/.gitignore @@ -295,6 +295,7 @@ test_report wheelhouse /.idea/* *.html +TMP_CHANGELOG.md # Docs files docs/.cache/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 5def3a04989..b2e950fa968 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,37 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo ## [Unreleased] +## [1.17.0] - 2021-06-08 + +### Added + +* **Documentation**: Include new public roadmap ([#452](https://github.com/awslabs/aws-lambda-powertools-python/issues/452)) +* **Documentation**: Remove old todo in idempotency docs +* **Data classes:** New `AttributeValueType` to get type and value from data in `DynamoDBStreamEvent` ([#462](https://github.com/awslabs/aws-lambda-powertools-python/issues/462)) +* **Data classes:** New decorator `event_source` to instantiate data_classes ([#442](https://github.com/awslabs/aws-lambda-powertools-python/issues/442)) +* **Logger:** New `clear_state` parameter to clear previously added custom keys upon invocation ([#467](https://github.com/awslabs/aws-lambda-powertools-python/issues/467)) +* **Parser:** Support for API Gateway HTTP API [#434](https://github.com/awslabs/aws-lambda-powertools-python/issues/434) ([#441](https://github.com/awslabs/aws-lambda-powertools-python/issues/441)) + +### Maintenance + +* **deps**: bump xenon from 0.7.1 to 0.7.3 ([#446](https://github.com/awslabs/aws-lambda-powertools-python/issues/446)) +* assited changelog pre-generation, auto-label PR ([#443](https://github.com/awslabs/aws-lambda-powertools-python/issues/443)) +* enable dependabot for dep upgrades ([#444](https://github.com/awslabs/aws-lambda-powertools-python/issues/444)) +* enable mergify ([#450](https://github.com/awslabs/aws-lambda-powertools-python/issues/450)) +* **deps**: bump mkdocs-material from 7.1.5 to 7.1.6 ([#451](https://github.com/awslabs/aws-lambda-powertools-python/issues/451)) +* **deps**: bump boto3 from 1.17.78 to 1.17.84 ([#449](https://github.com/awslabs/aws-lambda-powertools-python/issues/449)) +* update 
mergify to require approval on dependabot ([#456](https://github.com/awslabs/aws-lambda-powertools-python/issues/456)) +* **deps**: bump actions/setup-python from 1 to 2.2.2 ([#445](https://github.com/awslabs/aws-lambda-powertools-python/issues/445)) +* **deps:** bump boto3 from 1.17.87 to 1.17.88 ([#463](https://github.com/awslabs/aws-lambda-powertools-python/issues/463)) +* **deps:** bump boto3 from 1.17.88 to 1.17.89 ([#466](https://github.com/awslabs/aws-lambda-powertools-python/issues/466)) +* **deps:** bump boto3 from 1.17.84 to 1.17.85 ([#455](https://github.com/awslabs/aws-lambda-powertools-python/issues/455)) +* **deps:** bump boto3 from 1.17.85 to 1.17.86 ([#458](https://github.com/awslabs/aws-lambda-powertools-python/issues/458)) +* **deps:** bump boto3 from 1.17.86 to 1.17.87 ([#459](https://github.com/awslabs/aws-lambda-powertools-python/issues/459)) +* **deps-dev:** bump mkdocs-material from 7.1.6 to 7.1.7 ([#464](https://github.com/awslabs/aws-lambda-powertools-python/issues/464)) +* **deps-dev:** bump pytest-cov from 2.12.0 to 2.12.1 ([#454](https://github.com/awslabs/aws-lambda-powertools-python/issues/454)) +* **mergify:** disable check for matrix jobs +* **mergify:** use job name to match GH Actions + ## [1.16.1] - 2021-05-23 ### Fixed diff --git a/Makefile b/Makefile index b0a075d39a2..da43c1de67a 100644 --- a/Makefile +++ b/Makefile @@ -79,3 +79,7 @@ release: pr poetry build $(MAKE) release-test $(MAKE) release-prod + +changelog: + @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" + docker run -v "${PWD}":/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. > TMP_CHANGELOG.md diff --git a/README.md b/README.md index b452dd37d1a..25e7b2e343d 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A suite of Python utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. ([AWS Lambda Powertools Java](https://github.com/awslabs/aws-lambda-powertools-java) is also available). 
-**[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[API Docs](https://awslabs.github.io/aws-lambda-powertools-python/api/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=)** | **[🐛Bug Report](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=bug%2C+triage&template=bug_report.md&title=)** | **[Hello world example](https://github.com/aws-samples/cookiecutter-aws-sam-python)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** +**[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Roadmap](https://github.com/awslabs/aws-lambda-powertools-roadmap/projects/1)** | **[Quick hello world example](https://github.com/aws-samples/cookiecutter-aws-sam-python)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** > **Join us on the AWS Developers Slack at `#lambda-powertools`** - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-gu30gquv-EhwIYq3kHhhysaZ2aIX7ew)** diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 3231f30eccd..689409d9813 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -260,12 +260,18 @@ def _configure_sampling(self): ) def inject_lambda_context( - self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = None, correlation_id_path: str = None + self, + lambda_handler: Callable[[Dict, Any], Any] = None, + log_event: bool = None, + correlation_id_path: str = None, + clear_state: bool = False, ): """Decorator to capture Lambda contextual info and inject into logger Parameters ---------- + clear_state : bool, optional + Instructs logger to remove any custom keys previously added lambda_handler : Callable Method to inject the lambda context log_event : bool, optional @@ -311,7 +317,10 @@ def handler(event, context): if lambda_handler is None: logger.debug("Decorator called with parameters") return functools.partial( - self.inject_lambda_context, log_event=log_event, correlation_id_path=correlation_id_path + self.inject_lambda_context, + log_event=log_event, + correlation_id_path=correlation_id_path, + clear_state=clear_state, ) log_event = resolve_truthy_env_var_choice( @@ -322,7 +331,11 @@ def handler(event, context): def decorate(event, context): lambda_context = build_lambda_context_model(context) cold_start = _is_cold_start() - self.append_keys(cold_start=cold_start, **lambda_context.__dict__) + + if clear_state: + self.structure_logs(cold_start=cold_start, **lambda_context.__dict__) + else: + self.append_keys(cold_start=cold_start, **lambda_context.__dict__) if correlation_id_path: self.set_correlation_id(jmespath.search(correlation_id_path, event)) diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index a47c32ee07f..c5391880122 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -10,6 +10,7 @@ from .connect_contact_flow_event import ConnectContactFlowEvent from 
.dynamo_db_stream_event import DynamoDBStreamEvent from .event_bridge_event import EventBridgeEvent +from .event_source import event_source from .kinesis_stream_event import KinesisStreamEvent from .s3_event import S3Event from .ses_event import SESEvent @@ -31,4 +32,5 @@ "SESEvent", "SNSEvent", "SQSEvent", + "event_source", ] diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 73e064d0f26..159779c86a7 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -6,6 +6,7 @@ class ALBEventRequestContext(DictWrapper): @property def elb_target_group_arn(self) -> str: + """Target group arn for your Lambda function""" return self["requestContext"]["elb"]["targetGroupArn"] @@ -15,6 +16,7 @@ class ALBEvent(BaseProxyEvent): Documentation: -------------- - https://docs.aws.amazon.com/lambda/latest/dg/services-alb.html + - https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html """ @property diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py index bc3a4a82995..1ec3d6157bf 100644 --- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py +++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py @@ -1,15 +1,42 @@ from enum import Enum -from typing import Dict, Iterator, List, Optional +from typing import Any, Dict, Iterator, List, Optional, Union from aws_lambda_powertools.utilities.data_classes.common import DictWrapper +class AttributeValueType(Enum): + Binary = "B" + BinarySet = "BS" + Boolean = "BOOL" + List = "L" + Map = "M" + Number = "N" + NumberSet = "NS" + Null = "NULL" + String = "S" + StringSet = "SS" + + class AttributeValue(DictWrapper): """Represents the data for an attribute - Documentation: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html + Documentation: + -------------- + - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html + - https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html """ + def __init__(self, data: Dict[str, Any]): + """AttributeValue constructor + + Parameters + ---------- + data: Dict[str, Any] + Raw lambda event dict + """ + super().__init__(data) + self.dynamodb_type = list(data.keys())[0] + @property def b_value(self) -> Optional[str]: """An attribute of type Base64-encoded binary data object @@ -106,6 +133,29 @@ def ss_value(self) -> Optional[List[str]]: """ return self.get("SS") + @property + def get_type(self) -> AttributeValueType: + """Get the attribute value type based on the contained data""" + return AttributeValueType(self.dynamodb_type) + + @property + def l_value(self) -> Optional[List["AttributeValue"]]: + """Alias of list_value""" + return self.list_value + + @property + def m_value(self) -> Optional[Dict[str, "AttributeValue"]]: + """Alias of map_value""" + return self.map_value + + @property + def get_value(self) -> Union[Optional[bool], Optional[str], Optional[List], Optional[Dict]]: + """Get the attribute value""" + try: + return getattr(self, f"{self.dynamodb_type.lower()}_value") + except AttributeError: + raise TypeError(f"Dynamodb type {self.dynamodb_type} is not supported") + def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, 
AttributeValue]]: """A dict of type String to AttributeValue object map @@ -224,6 +274,29 @@ class DynamoDBStreamEvent(DictWrapper): Documentation: ------------- - https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html + + Example + ------- + **Process dynamodb stream events and use get_type and get_value for handling conversions** + + from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + AttributeValueType, + AttributeValue, + ) + from aws_lambda_powertools.utilities.typing import LambdaContext + + + @event_source(data_class=DynamoDBStreamEvent) + def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): + for record in event.records: + key: AttributeValue = record.dynamodb.keys["id"] + if key == AttributeValueType.Number: + assert key.get_value == key.n_value + print(key.get_value) + elif key == AttributeValueType.Map: + assert key.get_value == key.map_value + print(key.get_value) """ @property diff --git a/aws_lambda_powertools/utilities/data_classes/event_source.py b/aws_lambda_powertools/utilities/data_classes/event_source.py new file mode 100644 index 00000000000..3968f923573 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/event_source.py @@ -0,0 +1,39 @@ +from typing import Any, Callable, Dict, Type + +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@lambda_handler_decorator +def event_source( + handler: Callable[[Any, LambdaContext], Any], + event: Dict[str, Any], + context: LambdaContext, + data_class: Type[DictWrapper], +): + """Middleware to create an instance of the passed in event source data class + + Parameters + ---------- + handler: Callable + Lambda's handler + event: Dict + Lambda's Event + context: Dict + Lambda's Context + data_class: Type[DictWrapper] + Data class type to instantiate + + Example + -------- + + **Sample usage** + + from aws_lambda_powertools.utilities.data_classes import S3Event, event_source + + @event_source(data_class=S3Event) + def handler(event: S3Event, context): + return {"key": event.object_key} + """ + return handler(data_class(event), context) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 0cbd34213c1..31aef6dc0f2 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -224,6 +224,7 @@ def _generate_hash(self, data: Any) -> str: Hashed representation of the provided data """ + data = getattr(data, "raw_event", data) # could be a data class depending on decorator order hashed_data = self.hash_function(json.dumps(data, cls=Encoder).encode()) return hashed_data.hexdigest() diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py index e6f63c4792d..1b118d28117 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py @@ -1,4 +1,5 @@ from .apigw import ApiGatewayEnvelope +from .apigwv2 import ApiGatewayV2Envelope from .base import BaseEnvelope from .cloudwatch import CloudWatchLogsEnvelope from .dynamodb import DynamoDBStreamEnvelope @@ -9,6 +10,7 @@ __all__ = [ 
"ApiGatewayEnvelope", + "ApiGatewayV2Envelope", "CloudWatchLogsEnvelope", "DynamoDBStreamEnvelope", "EventBridgeEnvelope", diff --git a/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py new file mode 100644 index 00000000000..a627e4da0e5 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py @@ -0,0 +1,32 @@ +import logging +from typing import Any, Dict, Optional, Type, Union + +from ..models import APIGatewayProxyEventV2Model +from ..types import Model +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class ApiGatewayV2Envelope(BaseEnvelope): + """API Gateway V2 envelope to extract data within body key""" + + def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Optional[Model]: + """Parses data found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : Type[Model] + Data model provided to parse after extracting data using envelope + + Returns + ------- + Any + Parsed detail payload with model provided + """ + logger.debug(f"Parsing incoming data with Api Gateway model V2 {APIGatewayProxyEventV2Model}") + parsed_envelope = APIGatewayProxyEventV2Model.parse_obj(data) + logger.debug(f"Parsing event payload in `detail` with {model}") + return self._parse(data=parsed_envelope.body, model=model) diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 0e59b2197a8..e3fb50a2d5d 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -5,6 +5,15 @@ APIGatewayEventRequestContext, APIGatewayProxyEventModel, ) +from .apigwv2 import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, + RequestContextV2AuthorizerIam, + RequestContextV2AuthorizerIamCognito, + RequestContextV2AuthorizerJwt, + RequestContextV2Http, +) from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel from .event_bridge import EventBridgeModel @@ -35,6 +44,13 @@ from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel __all__ = [ + "APIGatewayProxyEventV2Model", + "RequestContextV2", + "RequestContextV2Http", + "RequestContextV2Authorizer", + "RequestContextV2AuthorizerJwt", + "RequestContextV2AuthorizerIam", + "RequestContextV2AuthorizerIamCognito", "CloudWatchLogsData", "CloudWatchLogsDecode", "CloudWatchLogsLogEvent", diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py new file mode 100644 index 00000000000..4243315bb21 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -0,0 +1,71 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, Field +from pydantic.networks import IPvAnyNetwork + +from ..types import Literal + + +class RequestContextV2AuthorizerIamCognito(BaseModel): + amr: List[str] + identityId: str + identityPoolId: str + + +class RequestContextV2AuthorizerIam(BaseModel): + accessKey: Optional[str] + accountId: Optional[str] + callerId: Optional[str] + principalOrgId: Optional[str] + userArn: Optional[str] + userId: Optional[str] + cognitoIdentity: RequestContextV2AuthorizerIamCognito + + 
+class RequestContextV2AuthorizerJwt(BaseModel): + claims: Dict[str, Any] + scopes: List[str] + + +class RequestContextV2Authorizer(BaseModel): + jwt: Optional[RequestContextV2AuthorizerJwt] + iam: Optional[RequestContextV2AuthorizerIam] + lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda") + + +class RequestContextV2Http(BaseModel): + method: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + path: str + protocol: str + sourceIp: IPvAnyNetwork + userAgent: str + + +class RequestContextV2(BaseModel): + accountId: str + apiId: str + authorizer: Optional[RequestContextV2Authorizer] + domainName: str + domainPrefix: str + requestId: str + routeKey: str + stage: str + time: str + timeEpoch: datetime + http: RequestContextV2Http + + +class APIGatewayProxyEventV2Model(BaseModel): + version: str + routeKey: str + rawPath: str + rawQueryString: str + cookies: Optional[List[str]] + headers: Dict[str, str] + queryStringParameters: Dict[str, str] + pathParameters: Optional[Dict[str, str]] + stageVariables: Optional[Dict[str, str]] + requestContext: RequestContextV2 + body: str + isBase64Encoded: bool diff --git a/docs/core/logger.md b/docs/core/logger.md index a544bf91e4b..45119ca51d6 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -231,8 +231,9 @@ We provide [built-in JMESPath expressions](#built-in-correlation-id-expressions) ### Appending additional keys -!!! info "Keys might be persisted across invocations" - Always set additional keys as part of your handler to ensure they have the latest value. Additional keys are kept in memory as part of a Logger instance and might be reused in non-cold start scenarios. +!!! info "Custom keys are persisted across warm invocations" + Always set additional keys as part of your handler to ensure they have the latest value, or explicitly clear them with [`clear_state=True`](#clearing-all-state). + You can append additional keys using either mechanism: @@ -426,6 +427,73 @@ You can remove any additional key from Logger state using `remove_keys`. } ``` +#### Clearing all state + +Logger is commonly initialized in the global scope. Due to [Lambda Execution Context reuse](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), this means that custom keys can be persisted across invocations. If you want all custom keys to be deleted, you can use `clear_state=True` param in `inject_lambda_context` decorator. + +!!! info + This is useful when you add multiple custom keys conditionally, instead of setting a default `None` value if not present. Any key with `None` value is automatically removed by Logger. + +!!! danger "This can have unintended side effects if you use Layers" + Lambda Layers code is imported before the Lambda handler. + + This means that `clear_state=True` will instruct Logger to remove any keys previously added before Lambda handler execution proceeds. + + You can either avoid running any code as part of Lambda Layers global scope, or override keys with their latest value as part of handler's execution. 
+ +=== "collect.py" + + ```python hl_lines="5 8" + from aws_lambda_powertools import Logger + + logger = Logger(service="payment") + + @logger.inject_lambda_context(clear_state=True) + def handler(event, context): + if event.get("special_key"): + # Should only be available in the first request log + # as the second request doesn't contain `special_key` + logger.append_keys(debugging_key="value") + + logger.info("Collecting payment") + ``` + +=== "#1 request" + + ```json hl_lines="7" + { + "level": "INFO", + "location": "collect.handler:10", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "special_key": "debug_key", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" + } + ``` + +=== "#2 request" + + ```json hl_lines="7" + { + "level": "INFO", + "location": "collect.handler:10", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": false, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" + } + ``` + + ### Logging exceptions Use `logger.exception` method to log contextual information about exceptions. Logger will include `exception_name` and `exception` keys to aid troubleshooting and error enumeration. diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 0fc33d3a3f7..3217c5364d3 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -21,22 +21,35 @@ Lambda function. ### Utilizing the data classes -The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class. +The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class or +by using the `event_source` decorator. For example, if your Lambda function is being triggered by an API Gateway proxy integration, you can use the `APIGatewayProxyEvent` class. 
=== "app.py" - ```python hl_lines="1 4" - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent +```python hl_lines="1 4" +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent - def lambda_handler(event, context): - event: APIGatewayProxyEvent = APIGatewayProxyEvent(event) +def lambda_handler(event: dict, context): + event = APIGatewayProxyEvent(event) + if 'helloworld' in event.path and event.http_method == 'GET': + do_something_with(event.body, user) +``` - if 'helloworld' in event.path and event.http_method == 'GET': - do_something_with(event.body, user) - ``` +Same example as above, but using the `event_source` decorator + +=== "app.py" + +```python hl_lines="1 3" +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent + +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if 'helloworld' in event.path and event.http_method == 'GET': + do_something_with(event.body, user) +``` **Autocomplete with self-documented properties and methods** @@ -49,7 +62,8 @@ For example, if your Lambda function is being triggered by an API Gateway proxy Event Source | Data_class ------------------------------------------------- | --------------------------------------------------------------------------------- [API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent` -[API Gateway Proxy event v2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` +[API Gateway Proxy V2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` +[Application Load Balancer](#application-load-balancer) | `ALBEvent` [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` @@ -76,34 +90,47 @@ It is used for either API Gateway REST API or HTTP API using v1 proxy event. === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent - def lambda_handler(event, context): - event: APIGatewayProxyEvent = APIGatewayProxyEvent(event) +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if "helloworld" in event.path and event.http_method == "GET": request_context = event.request_context identity = request_context.identity + user = identity.user + do_something_with(event.json_body, user) +``` - if 'helloworld' in event.path and event.http_method == 'GET': - user = identity.user - do_something_with(event.body, user) - ``` +### API Gateway Proxy V2 -### API Gateway Proxy v2 +It is used for HTTP API using v2 proxy event. 
=== "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2 +```python +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEventV2 - def lambda_handler(event, context): - event: APIGatewayProxyEventV2 = APIGatewayProxyEventV2(event) - request_context = event.request_context - query_string_parameters = event.query_string_parameters +@event_source(data_class=APIGatewayProxyEventV2) +def lambda_handler(event: APIGatewayProxyEventV2, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) +``` - if 'helloworld' in event.raw_path and request_context.http.method == 'POST': - do_something_with(event.body, query_string_parameters) - ``` +### Application Load Balancer + +Is it used for Application load balancer event. + +=== "app.py" + +```python +from aws_lambda_powertools.utilities.data_classes import event_source, ALBEvent + +@event_source(data_class=ALBEvent) +def lambda_handler(event: ALBEvent, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) +``` ### AppSync Resolver @@ -210,18 +237,17 @@ decompress and parse json data from the event. === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent - from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData - - def lambda_handler(event, context): - event: CloudWatchLogsEvent = CloudWatchLogsEvent(event) +```python +from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchLogsEvent +from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData - decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data - log_events = decompressed_log.log_events - for event in log_events: - do_something_with(event.timestamp, event.message) - ``` +@event_source(data_class=CloudWatchLogsEvent) +def lambda_handler(event: CloudWatchLogsEvent, context): + decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data + log_events = decompressed_log.log_events + for event in log_events: + do_something_with(event.timestamp, event.message) +``` ### CodePipeline Job @@ -229,51 +255,50 @@ Data classes and utility functions to help create continuous delivery pipelines === "app.py" - ```python - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent +```python +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes import event_source, CodePipelineJobEvent - logger = Logger() +logger = Logger() +@event_source(data_class=CodePipelineJobEvent) +def lambda_handler(event, context): + """The Lambda function handler - def lambda_handler(event, context): - """The Lambda function handler - - If a continuing job then checks the CloudFormation stack status - and updates the job accordingly. - - If a new job then kick of an update or creation of the target - CloudFormation stack. 
- """ - event: CodePipelineJobEvent = CodePipelineJobEvent(event) - - # Extract the Job ID - job_id = event.get_id - - # Extract the params - params: dict = event.decoded_user_parameters - stack = params["stack"] - artifact_name = params["artifact"] - template_file = params["file"] - - try: - if event.data.continuation_token: - # If we're continuing then the create/update has already been triggered - # we just need to check if it has finished. - check_stack_update_status(job_id, stack) - else: - template = event.get_artifact(artifact_name, template_file) - # Kick off a stack update or create - start_update_or_create(job_id, stack, template) - except Exception as e: - # If any other exceptions which we didn't expect are raised - # then fail the job and log the exception message. - logger.exception("Function failed due to exception.") - put_job_failure(job_id, "Function exception: " + str(e)) - - logger.debug("Function complete.") - return "Complete." - ``` + If a continuing job then checks the CloudFormation stack status + and updates the job accordingly. + + If a new job then kick of an update or creation of the target + CloudFormation stack. + """ + + # Extract the Job ID + job_id = event.get_id + + # Extract the params + params: dict = event.decoded_user_parameters + stack = params["stack"] + artifact_name = params["artifact"] + template_file = params["file"] + + try: + if event.data.continuation_token: + # If we're continuing then the create/update has already been triggered + # we just need to check if it has finished. + check_stack_update_status(job_id, stack) + else: + template = event.get_artifact(artifact_name, template_file) + # Kick off a stack update or create + start_update_or_create(job_id, stack, template) + except Exception as e: + # If any other exceptions which we didn't expect are raised + # then fail the job and log the exception message. + logger.exception("Function failed due to exception.") + put_job_failure(job_id, "Function exception: " + str(e)) + + logger.debug("Function complete.") + return "Complete." 
+``` ### Cognito User Pool @@ -297,15 +322,15 @@ Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChalleng === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent - def lambda_handler(event, context): - event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) +def lambda_handler(event, context): + event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) - user_attributes = event.request.user_attributes - do_something_with(user_attributes) - ``` + user_attributes = event.request.user_attributes + do_something_with(user_attributes) +``` #### Define Auth Challenge Example @@ -470,17 +495,18 @@ This example is based on the AWS Cognito docs for [Create Auth Challenge Lambda === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent - def handler(event: dict, context) -> dict: - event: CreateAuthChallengeTriggerEvent = CreateAuthChallengeTriggerEvent(event) - if event.request.challenge_name == "CUSTOM_CHALLENGE": - event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} - event.response.private_challenge_parameters = {"answer": "5"} - event.response.challenge_metadata = "CAPTCHA_CHALLENGE" - return event.raw_event - ``` +@event_source(data_class=CreateAuthChallengeTriggerEvent) +def handler(event: CreateAuthChallengeTriggerEvent, context) -> dict: + if event.request.challenge_name == "CUSTOM_CHALLENGE": + event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} + event.response.private_challenge_parameters = {"answer": "5"} + event.response.challenge_metadata = "CAPTCHA_CHALLENGE" + return event.raw_event +``` #### Verify Auth Challenge Response Example @@ -488,16 +514,17 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent - def handler(event: dict, context) -> dict: - event: VerifyAuthChallengeResponseTriggerEvent = VerifyAuthChallengeResponseTriggerEvent(event) - event.response.answer_correct = ( - event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer - ) - return event.raw_event - ``` +@event_source(data_class=VerifyAuthChallengeResponseTriggerEvent) +def handler(event: VerifyAuthChallengeResponseTriggerEvent, context) -> dict: + event.response.answer_correct = ( + event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer + ) + return event.raw_event +``` ### Connect Contact Flow @@ -505,21 +532,21 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( - ConnectContactFlowChannel, - ConnectContactFlowEndpointType, - ConnectContactFlowEvent, - 
ConnectContactFlowInitiationMethod, - ) - - def lambda_handler(event, context): - event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) - assert event.contact_data.attributes == {"Language": "en-US"} - assert event.contact_data.channel == ConnectContactFlowChannel.VOICE - assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER - assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API - ``` +```python +from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( + ConnectContactFlowChannel, + ConnectContactFlowEndpointType, + ConnectContactFlowEvent, + ConnectContactFlowInitiationMethod, +) + +def lambda_handler(event, context): + event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) + assert event.contact_data.attributes == {"Language": "en-US"} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API +``` ### DynamoDB Streams @@ -529,34 +556,55 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - DynamoDBStreamEvent, - DynamoDBRecordEventName - ) - - def lambda_handler(event, context): - event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) - - # Multiple records can be delivered in a single event - for record in event.records: - if record.event_name == DynamoDBRecordEventName.MODIFY: - do_something_with(record.dynamodb.new_image) - do_something_with(record.dynamodb.old_image) - ``` + ```python + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBStreamEvent, + DynamoDBRecordEventName + ) + + def lambda_handler(event, context): + event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) + + # Multiple records can be delivered in a single event + for record in event.records: + if record.event_name == DynamoDBRecordEventName.MODIFY: + do_something_with(record.dynamodb.new_image) + do_something_with(record.dynamodb.old_image) + ``` + +=== "multiple_records_types.py" + + ```python + from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import AttributeValueType, AttributeValue + from aws_lambda_powertools.utilities.typing import LambdaContext + + + @event_source(data_class=DynamoDBStreamEvent) + def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): + for record in event.records: + key: AttributeValue = record.dynamodb.keys["id"] + if key == AttributeValueType.Number: + # {"N": "123.45"} => "123.45" + assert key.get_value == key.n_value + print(key.get_value) + elif key == AttributeValueType.Map: + assert key.get_value == key.map_value + print(key.get_value) + ``` ### EventBridge === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent - def lambda_handler(event, context): - event: EventBridgeEvent = EventBridgeEvent(event) - do_something_with(event.detail) +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event: EventBridgeEvent, context): + do_something_with(event.detail) - ``` +``` ### Kinesis 
streams @@ -565,40 +613,40 @@ or plain text, depending on the original payload. === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import KinesisStreamEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, KinesisStreamEvent - def lambda_handler(event, context): - event: KinesisStreamEvent = KinesisStreamEvent(event) - kinesis_record = next(event.records).kinesis +@event_source(data_class=KinesisStreamEvent) +def lambda_handler(event: KinesisStreamEvent, context): + kinesis_record = next(event.records).kinesis - # if data was delivered as text - data = kinesis_record.data_as_text() + # if data was delivered as text + data = kinesis_record.data_as_text() - # if data was delivered as json - data = kinesis_record.data_as_json() + # if data was delivered as json + data = kinesis_record.data_as_json() - do_something_with(data) - ``` + do_something_with(data) +``` ### S3 === "app.py" - ```python - from urllib.parse import unquote_plus - from aws_lambda_powertools.utilities.data_classes import S3Event +```python +from urllib.parse import unquote_plus +from aws_lambda_powertools.utilities.data_classes import event_source, S3Event - def lambda_handler(event, context): - event: S3Event = S3Event(event) - bucket_name = event.bucket_name +@event_source(data_class=S3Event) +def lambda_handler(event: S3Event, context): + bucket_name = event.bucket_name - # Multiple records can be delivered in a single event - for record in event.records: - object_key = unquote_plus(record.s3.get_object.key) + # Multiple records can be delivered in a single event + for record in event.records: + object_key = unquote_plus(record.s3.get_object.key) - do_something_with(f'{bucket_name}/{object_key}') - ``` + do_something_with(f"{bucket_name}/{object_key}") +``` ### S3 Object Lambda @@ -606,84 +654,81 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" - ```python hl_lines="5-6 12 14" - import boto3 - import requests +```python hl_lines="5-6 12 14" +import boto3 +import requests - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA - from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA +from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent - logger = Logger() - session = boto3.Session() - s3 = session.client("s3") +logger = Logger() +session = boto3.Session() +s3 = session.client("s3") - @logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) - def lambda_handler(event, context): - event = S3ObjectLambdaEvent(event) +@logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) +def lambda_handler(event, context): + event = S3ObjectLambdaEvent(event) - # Get object from S3 - response = requests.get(event.input_s3_url) - original_object = response.content.decode("utf-8") + # Get object from S3 + response = requests.get(event.input_s3_url) + original_object = response.content.decode("utf-8") - # Make changes to the object about to be returned - transformed_object = original_object.upper() + # Make changes to the object about to be returned + transformed_object = original_object.upper() - # Write object back to S3 Object Lambda - s3.write_get_object_response( - Body=transformed_object, 
RequestRoute=event.request_route, RequestToken=event.request_token - ) + # Write object back to S3 Object Lambda + s3.write_get_object_response( + Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token + ) - return {"status_code": 200} - ``` + return {"status_code": 200} +``` ### SES === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SESEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SESEvent - def lambda_handler(event, context): - event: SESEvent = SESEvent(event) +@event_source(data_class=SESEvent) +def lambda_handler(event: SESEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + mail = record.ses.mail + common_headers = mail.common_headers - # Multiple records can be delivered in a single event - for record in event.records: - mail = record.ses.mail - common_headers = mail.common_headers - - do_something_with(common_headers.to, common_headers.subject) - ``` + do_something_with(common_headers.to, common_headers.subject) +``` ### SNS === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SNSEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SNSEvent - def lambda_handler(event, context): - event: SNSEvent = SNSEvent(event) +@event_source(data_class=SNSEvent) +def lambda_handler(event: SNSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + message = record.sns.message + subject = record.sns.subject - # Multiple records can be delivered in a single event - for record in event.records: - message = record.sns.message - subject = record.sns.subject - - do_something_with(subject, message) - ``` + do_something_with(subject, message) +``` ### SQS === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SQSEvent - - def lambda_handler(event, context): - event: SQSEvent = SQSEvent(event) +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SQSEvent - # Multiple records can be delivered in a single event - for record in event.records: - do_something_with(record.body) - ``` +@event_source(data_class=SQSEvent) +def lambda_handler(event: SQSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + do_something_with(record.body) +``` diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index bd9a8f8e98b..a684695b36c 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -85,8 +85,6 @@ TTL attribute name | `expiration` | This can only be configured after your table see 1WCU and 1RCU. Review the [DynamoDB pricing documentation](https://aws.amazon.com/dynamodb/pricing/) to estimate the cost. -!!! danger "CREATE SECTION FOR PERSISTENCE LAYERS" - ### Idempotent decorator You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. 
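For reference, a minimal sketch of the `idempotent` decorator wiring described above — the table name and handler body are illustrative assumptions, not part of this change:

```python
from aws_lambda_powertools.utilities.idempotency import (
    DynamoDBPersistenceLayer,
    idempotent,
)

# "IdempotencyTable" is an illustrative name; point this at the DynamoDB
# table you provisioned for idempotency records
persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")


@idempotent(persistence_store=persistence_layer)
def handler(event, context):
    # A retry with an identical payload returns the previously cached
    # result instead of running this body again
    return {"statusCode": 200, "payment_id": event.get("payment_id")}
```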
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 83fca6b6741..11dbaca48a8 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -162,6 +162,7 @@ Parser comes with the following built-in models: | **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | | **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | | **APIGatewayProxyEvent** | Lambda Event Source payload for Amazon API Gateway | +| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 | ### extending built-in models @@ -295,17 +296,17 @@ Here's an example of parsing a model found in an event coming from EventBridge, Parser comes with the following built-in envelopes, where `Model` in the return section is your given model. -| Envelope name | Behaviour | Return | -| -------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | -| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.<br/>
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | -| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | -| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` | -| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` | -| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` | -| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` | - +| Envelope name | Behaviour | Return | +| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | +| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys. | `List[Dict[str, Optional[Model]]]` | +| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.<br/>
2. Parses `detail` key using your model and returns it. | `Model` | +| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` | +| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.<br/>
2. Parses records in `message` key using your model and returns them in a list. | `List[Model]` | +| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.<br/>
2. Parses records in `Records` key using your model and returns them in a list. | `List[Model]` | +| **SnsEnvelope** | 1. Parses data using `SnsModel`.<br/>
2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` | +| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.<br/>
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and returns them in a list. | `List[Model]` | +| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.<br/>
2. Parses `body` key using your model and returns it. | `Model` | +| **ApiGatewayV2Envelope** | 1. Parses data using `APIGatewayProxyEventV2Model`.
2. Parses `body` key using your model and returns it. | `Model` | ### Bringing your own envelope You can create your own Envelope model and logic by inheriting from `BaseEnvelope`, and implementing the `parse` method. diff --git a/mkdocs.yml b/mkdocs.yml index b07e30386dd..0a761ad9540 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -7,6 +7,7 @@ edit_uri: edit/develop/docs nav: - Homepage: index.md - Changelog: changelog.md + - Roadmap: https://github.com/awslabs/aws-lambda-powertools-roadmap/projects/1" target="_blank - API reference: api/" target="_blank - Core utilities: - core/tracer.md diff --git a/poetry.lock b/poetry.lock index 45512c39eb1..ca9ac489f80 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.78" +version = "1.17.89" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.78,<1.21.0" +botocore = ">=1.20.89,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.78" +version = "1.20.89" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -591,7 +591,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.1.5" +version = "7.1.7" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -789,32 +789,33 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-asyncio" -version = "0.14.0" +version = "0.15.1" description = "Pytest support for asyncio." category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">= 3.6" [package.dependencies] pytest = ">=5.4.0" [package.extras] -testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] +testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.12.0" +version = "2.12.1" description = "Pytest plugin for measuring coverage." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = ">=5.2.1" pytest = ">=4.6" +toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" @@ -1035,7 +1036,7 @@ python-versions = "*" [[package]] name = "xenon" -version = "0.7.1" +version = "0.7.3" description = "Monitor code metrics for Python on your CI server" category = "dev" optional = false @@ -1043,7 +1044,7 @@ python-versions = "*" [package.dependencies] PyYAML = ">=4.2b1,<6.0" -radon = {version = ">=4,<5", extras = ["flake8"]} +radon = ">=4,<5" requests = ">=2.0,<3.0" [[package]] @@ -1064,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "86a0de7bd25e0ebdf9a7eb445eca6def21bb7365f4acc12e0512e233794928c9" +content-hash = "3159635f02dd232e8271d6fd4f6b1b92cefb6f8b8ada60bda6929f3839515862" [metadata.files] appdirs = [ @@ -1091,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.78-py2.py3-none-any.whl", hash = "sha256:1a87855123df1f18081a5fb8c1abde28d0096a03f6f3ebb06bcfb77cdffdae5e"}, - {file = "boto3-1.17.78.tar.gz", hash = "sha256:2a5caee63d45fbdcc85e710c7f4146112f5d10b22fd0176643d2f2914cce54df"}, + {file = "boto3-1.17.89-py2.py3-none-any.whl", hash = "sha256:1f02cd513b130f9cd86c99836de6a0a5f78ea55110bdbc9011d9d78ff0fd3204"}, + {file = "boto3-1.17.89.tar.gz", hash = "sha256:06d8dca85a0bb66b7bf2721745895d44691c78dbe7eb3b146702aff85e34af34"}, ] botocore = [ - {file = "botocore-1.20.78-py2.py3-none-any.whl", hash = "sha256:37105b9434d73f9c4d4960ee54c8eb129120f4c6681eb16edf483f03c5e2326d"}, - {file = "botocore-1.20.78.tar.gz", hash = "sha256:e74775f9e64e975787d76390fc5ac5aba875d726bb9ece3b7bd900205b430389"}, + {file = "botocore-1.20.89-py2.py3-none-any.whl", hash = "sha256:e112f9a45db1c5a42f787e4b228a35da6e823bcba70f43f43005b4fb58066446"}, + {file = "botocore-1.20.89.tar.gz", hash = "sha256:ce0fa8bc260ad187824052805d224cee239d953bb4bfb1e52cf35ad79481b316"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, @@ -1341,8 +1342,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.5.tar.gz", hash = "sha256:dc4849948695e22b3385518f6237e88164a201a013c6ca8257734a2df3d0c7c6"}, - {file = "mkdocs_material-7.1.5-py2.py3-none-any.whl", hash = "sha256:d2663b7a0a6ecd435eeb6c2686cd6a1a4e3bb6b6f021464d88a9894d8533e288"}, + {file = "mkdocs-material-7.1.7.tar.gz", hash = "sha256:34d57af1e3e68ff4251feb82ced70545d8aa6064861ba76b1a15928399d21879"}, + {file = "mkdocs_material-7.1.7-py2.py3-none-any.whl", hash = "sha256:1725d02efed5d989258fd1620673e78a7171f82028f30c2da8d21e7539150221"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, @@ -1428,12 +1429,12 @@ pytest = [ {file = "pytest-6.2.4.tar.gz", hash = 
"sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"}, - {file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"}, + {file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, + {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] pytest-cov = [ - {file = "pytest-cov-2.12.0.tar.gz", hash = "sha256:8535764137fecce504a49c2b742288e3d34bc09eed298ad65963616cc98fd45e"}, - {file = "pytest_cov-2.12.0-py2.py3-none-any.whl", hash = "sha256:95d4933dcbbacfa377bb60b29801daa30d90c33981ab2a79e9ab4452c165066e"}, + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-mock = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, @@ -1678,8 +1679,8 @@ wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] xenon = [ - {file = "xenon-0.7.1-py2.py3-none-any.whl", hash = "sha256:33d807ef805a2ed854adfcc7cc998398d5c0626a5ad443e52684b998a4dd4aa3"}, - {file = "xenon-0.7.1.tar.gz", hash = "sha256:38bf283135f0636355ecf6054b6f37226af12faab152161bda1a4f9e4dc5b701"}, + {file = "xenon-0.7.3-py2.py3-none-any.whl", hash = "sha256:a167b4c329fbea7cd84b148007ba92142f46b88ca095488c175dc7a8a8007ee9"}, + {file = "xenon-0.7.3.tar.gz", hash = "sha256:eda949fbf3cfb4851d49d97e961e2b18a6b66fbecaf285dc89230775d2b2a99f"}, ] zipp = [ {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, diff --git a/pyproject.toml b/pyproject.toml index 622387eb60b..b0ef085c31d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.16.1" +version = "1.17.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed"] @@ -40,16 +40,16 @@ flake8-fixme = "^1.1.1" flake8-isort = "^4.0.0" flake8-variables-names = "^0.0.4" isort = "^5.8.0" -pytest-cov = "^2.11.1" +pytest-cov = "^2.12.1" pytest-mock = "^3.5.1" pdoc3 = "^0.9.2" -pytest-asyncio = "^0.14.0" +pytest-asyncio = "^0.15.1" bandit = "^1.7.0" radon = "^4.5.0" -xenon = "^0.7.1" +xenon = "^0.7.3" flake8-eradicate = "^1.0.0" flake8-bugbear = "^21.3.2" -mkdocs-material = "^7.1.0" +mkdocs-material = "^7.1.7" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" diff --git a/tests/events/apiGatewayProxyV2Event.json b/tests/events/apiGatewayProxyV2Event.json index 4d0cfdf5703..5e001934fee 100644 --- a/tests/events/apiGatewayProxyV2Event.json +++ b/tests/events/apiGatewayProxyV2Event.json @@ -36,7 +36,7 @@ "method": "POST", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" }, "requestId": "id", @@ -54,4 +54,4 @@ "stageVariable1": "value1", "stageVariable2": "value2" } -} +} \ No newline at end of file diff --git 
a/tests/events/apiGatewayProxyV2IamEvent.json b/tests/events/apiGatewayProxyV2IamEvent.json index 73d50d78a4a..43f33e1678d 100644 --- a/tests/events/apiGatewayProxyV2IamEvent.json +++ b/tests/events/apiGatewayProxyV2IamEvent.json @@ -29,7 +29,9 @@ "accountId": "1234567890", "callerId": "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials", "cognitoIdentity": { - "amr" : ["foo"], + "amr": [ + "foo" + ], "identityId": "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce", "identityPoolId": "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" }, @@ -47,7 +49,7 @@ "method": "GET", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" } }, @@ -57,4 +59,4 @@ }, "body": "{\r\n\t\"a\": 1\r\n}", "isBase64Encoded": false -} +} \ No newline at end of file diff --git a/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json b/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json index 75d1574f854..cae3130de80 100644 --- a/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json +++ b/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json @@ -37,7 +37,7 @@ "method": "GET", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" } }, @@ -47,4 +47,4 @@ }, "body": "{\r\n\t\"a\": 1\r\n}", "isBase64Encoded": false -} +} \ No newline at end of file diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 25f76af48be..0cf19ab9de0 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -1,4 +1,5 @@ import copy +import hashlib import json import sys from hashlib import md5 @@ -7,6 +8,7 @@ import pytest from botocore import stub +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2, event_source from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, @@ -19,6 +21,7 @@ from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord from aws_lambda_powertools.utilities.validation import envelopes, validator +from tests.functional.utils import load_event TABLE_NAME = "TEST_TABLE" @@ -223,7 +226,7 @@ def lambda_handler(event, context): def test_idempotent_lambda_first_execution_cached( idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer, - lambda_apigw_event: DynamoDBPersistenceLayer, + lambda_apigw_event, expected_params_update_item, expected_params_put_item, lambda_response, @@ -845,3 +848,41 @@ def handler(event, context): handler({}, lambda_context) assert "No data found to create a hashed idempotency_key" == e.value.args[0] + + +class MockPersistenceLayer(BasePersistenceLayer): + def __init__(self, expected_idempotency_key: str): + self.expected_idempotency_key = expected_idempotency_key + super(MockPersistenceLayer, self).__init__() + + def _put_record(self, data_record: DataRecord) -> None: + assert data_record.idempotency_key == self.expected_idempotency_key + + def _update_record(self, data_record: DataRecord) -> None: + assert data_record.idempotency_key == self.expected_idempotency_key + + def _get_record(self, idempotency_key) -> DataRecord: + ... + + def _delete_record(self, data_record: DataRecord) -> None: + ... 
+ + +def test_idempotent_lambda_event_source(lambda_context): + # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator + mock_event = load_event("apiGatewayProxyV2Event.json") + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + expected_result = {"message": "Foo"} + + # GIVEN an event_source decorator + # AND then an idempotent decorator + @event_source(data_class=APIGatewayProxyEventV2) + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + assert isinstance(event, APIGatewayProxyEventV2) + return expected_result + + # WHEN calling the lambda handler + result = lambda_handler(mock_event, lambda_context) + # THEN we expect the handler to execute successfully + assert result == expected_result diff --git a/tests/functional/parser/test_apigwv2.py b/tests/functional/parser/test_apigwv2.py new file mode 100644 index 00000000000..ee6a4790cd4 --- /dev/null +++ b/tests/functional/parser/test_apigwv2.py @@ -0,0 +1,92 @@ +from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser.models import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyApiGatewayBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) +def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): + assert event.message == "Hello" + assert event.username == "Ran" + + +@event_parser(model=APIGatewayProxyEventV2Model) +def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): + return event + + +def test_apigw_v2_event_with_envelope(): + event = load_event("apiGatewayProxyV2Event.json") + event["body"] = '{"message": "Hello", "username": "Ran"}' + handle_apigw_with_envelope(event, LambdaContext()) + + +def test_apigw_v2_event_jwt_authorizer(): + event = load_event("apiGatewayProxyV2Event.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + assert parsed_event.version == event["version"] + assert parsed_event.routeKey == event["routeKey"] + assert parsed_event.rawPath == event["rawPath"] + assert parsed_event.rawQueryString == event["rawQueryString"] + assert parsed_event.cookies == event["cookies"] + assert parsed_event.cookies[0] == "cookie1" + assert parsed_event.headers == event["headers"] + assert parsed_event.queryStringParameters == event["queryStringParameters"] + assert parsed_event.queryStringParameters["parameter2"] == "value" + + request_context = parsed_event.requestContext + assert request_context.accountId == event["requestContext"]["accountId"] + assert request_context.apiId == event["requestContext"]["apiId"] + assert request_context.authorizer.jwt.claims == event["requestContext"]["authorizer"]["jwt"]["claims"] + assert request_context.authorizer.jwt.scopes == event["requestContext"]["authorizer"]["jwt"]["scopes"] + assert request_context.domainName == event["requestContext"]["domainName"] + assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] + + http = request_context.http + assert http.method == "POST" + assert http.path == "/my/path" + assert http.protocol == "HTTP/1.1" + assert str(http.sourceIp) == "192.168.0.1/32" + assert http.userAgent == "agent" + + assert 
request_context.requestId == event["requestContext"]["requestId"] + assert request_context.routeKey == event["requestContext"]["routeKey"] + assert request_context.stage == event["requestContext"]["stage"] + assert request_context.time == event["requestContext"]["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == event["requestContext"]["timeEpoch"] + assert parsed_event.body == event["body"] + assert parsed_event.pathParameters == event["pathParameters"] + assert parsed_event.isBase64Encoded == event["isBase64Encoded"] + assert parsed_event.stageVariables == event["stageVariables"] + + +def test_api_gateway_proxy_v2_event_lambda_authorizer(): + event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + request_context: RequestContextV2 = parsed_event.requestContext + assert request_context is not None + lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value + assert lambda_props is not None + assert lambda_props["key"] == "value" + + +def test_api_gateway_proxy_v2_event_iam_authorizer(): + event = load_event("apiGatewayProxyV2IamEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + iam = parsed_event.requestContext.authorizer.iam + assert iam is not None + assert iam.accessKey == "ARIA2ZJZYVUEREEIHAKY" + assert iam.accountId == "1234567890" + assert iam.callerId == "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials" + assert iam.cognitoIdentity.amr == ["foo"] + assert iam.cognitoIdentity.identityId == "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce" + assert iam.cognitoIdentity.identityPoolId == "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" + assert iam.principalOrgId == "AwsOrgId" + assert iam.userArn == "arn:aws:iam::1234567890:user/Admin" + assert iam.userId == "AROA2ZJZYVRE7Y3TUXHH6" diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index f56d0700e6f..8b412860694 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -58,10 +58,12 @@ ) from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( AttributeValue, + AttributeValueType, DynamoDBRecordEventName, DynamoDBStreamEvent, StreamViewType, ) +from aws_lambda_powertools.utilities.data_classes.event_source import event_source from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent from tests.functional.utils import load_event @@ -442,6 +444,33 @@ def test_dynamo_db_stream_trigger_event(): assert record.user_identity is None +def test_dynamo_attribute_value_b_value(): + example_attribute_value = {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Binary + assert attribute_value.b_value == attribute_value.get_value + + +def test_dynamo_attribute_value_bs_value(): + example_attribute_value = {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.BinarySet + assert attribute_value.bs_value == attribute_value.get_value + + +def test_dynamo_attribute_value_bool_value(): + example_attribute_value = {"BOOL": True} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Boolean + assert 
attribute_value.bool_value == attribute_value.get_value + + def test_dynamo_attribute_value_list_value(): example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]} attribute_value = AttributeValue(example_attribute_value) @@ -449,6 +478,9 @@ def test_dynamo_attribute_value_list_value(): assert list_value is not None item = list_value[0] assert item.s_value == "Cookies" + assert attribute_value.get_type == AttributeValueType.List + assert attribute_value.l_value == attribute_value.list_value + assert attribute_value.list_value == attribute_value.get_value def test_dynamo_attribute_value_map_value(): @@ -460,6 +492,65 @@ def test_dynamo_attribute_value_map_value(): assert map_value is not None item = map_value["Name"] assert item.s_value == "Joe" + assert attribute_value.get_type == AttributeValueType.Map + assert attribute_value.m_value == attribute_value.map_value + assert attribute_value.map_value == attribute_value.get_value + + +def test_dynamo_attribute_value_n_value(): + example_attribute_value = {"N": "123.45"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Number + assert attribute_value.n_value == attribute_value.get_value + + +def test_dynamo_attribute_value_ns_value(): + example_attribute_value = {"NS": ["42.2", "-19", "7.5", "3.14"]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.NumberSet + assert attribute_value.ns_value == attribute_value.get_value + + +def test_dynamo_attribute_value_null_value(): + example_attribute_value = {"NULL": True} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Null + assert attribute_value.null_value == attribute_value.get_value + + +def test_dynamo_attribute_value_s_value(): + example_attribute_value = {"S": "Hello"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.String + assert attribute_value.s_value == attribute_value.get_value + + +def test_dynamo_attribute_value_ss_value(): + example_attribute_value = {"SS": ["Giraffe", "Hippo", "Zebra"]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.StringSet + assert attribute_value.ss_value == attribute_value.get_value + + +def test_dynamo_attribute_value_type_error(): + example_attribute_value = {"UNSUPPORTED": "'value' should raise a type error"} + + attribute_value = AttributeValue(example_attribute_value) + + with pytest.raises(TypeError): + print(attribute_value.get_value) + with pytest.raises(ValueError): + print(attribute_value.get_type) def test_event_bridge_event(): @@ -743,7 +834,7 @@ def test_api_gateway_proxy_v2_event(): assert http.method == "POST" assert http.path == "/my/path" assert http.protocol == "HTTP/1.1" - assert http.source_ip == "IP" + assert http.source_ip == "192.168.0.1/32" assert http.user_agent == "agent" assert request_context.request_id == event["requestContext"]["requestId"] @@ -1237,3 +1328,15 @@ def download_file(bucket: str, key: str, tmp_name: str): } ) assert artifact_str == file_contents + + +def test_reflected_types(): + # GIVEN an event_source decorator + @event_source(data_class=APIGatewayProxyEventV2) + def lambda_handler(event: APIGatewayProxyEventV2, _): + # THEN we expect the event to be of the passed-in data class type + assert isinstance(event, APIGatewayProxyEventV2) +
assert event.get_header_value("x-foo") == "Foo" + + # WHEN calling the lambda handler + lambda_handler({"headers": {"X-Foo": "Foo"}}, None) diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index ba6e82b72af..44249af6250 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -562,3 +562,23 @@ def handler(event, context): # THEN we should output to a file not stdout log = log_file.read_text() assert "custom handler" in log + + +def test_clear_state_on_inject_lambda_context(lambda_context, stdout, service_name): + # GIVEN + logger = Logger(service=service_name, stream=stdout) + + # WHEN clear_state is set and a key was conditionally added in the first invocation + @logger.inject_lambda_context(clear_state=True) + def handler(event, context): + if event.get("add_key"): + logger.append_keys(my_key="value") + logger.info("Foo") + + # THEN custom key should only exist in the first log + handler({"add_key": True}, lambda_context) + handler({}, lambda_context) + + first_log, second_log = capture_multiple_logging_statements_output(stdout) + assert "my_key" in first_log + assert "my_key" not in second_log
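The `clear_state` behaviour exercised by this last test corresponds, in application code, roughly to the sketch below; the service name and the appended key are illustrative assumptions, not taken from this diff.

```python
from aws_lambda_powertools import Logger

logger = Logger(service="payment")


@logger.inject_lambda_context(clear_state=True)
def handler(event, context):
    # Keys appended during one invocation are dropped before the next,
    # because clear_state=True resets the logger's custom keys between
    # warm Lambda invocations
    if event.get("add_key"):
        logger.append_keys(my_key="value")
    logger.info("Collecting payment")
```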