From bfb67e7230e9192c602e995659611692e01ec7a7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 28 May 2021 14:34:34 +0200 Subject: [PATCH 01/35] chore: trial boring cyborg automation --- .github/boring-cyborg.yml | 86 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 .github/boring-cyborg.yml diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml new file mode 100644 index 00000000000..89d93b17e37 --- /dev/null +++ b/.github/boring-cyborg.yml @@ -0,0 +1,86 @@ +##### Labeler ########################################################################################################## +labelPRBasedOnFilePath: + area/utilities: + - aws_lambda_powertools/utilities/**/* + + # Add 'label2' to any file changes within 'example2' folder + documentation: + - docs/* + - docs/**/* + +# # Complex: Add 'area/core' label to any change within the 'core' package +# area/core: +# - src/core/* +# - src/core/**/* +# +# # Add 'test' label to any change to *.spec.js files within the source dir +# test: +# - src/**/*.spec.js + +##### Greetings ######################################################################################################## +firstPRWelcomeComment: > + Thanks a lot for your first contribution! Please check out our contributing guidelines and don't hesitate to ask whatever you need. + +# Comment to be posted to congratulate user on their first merged PR +firstPRMergeComment: > + Awesome work, congrats on your first merged pull request and thank you for helping improve everyone's experience! + +# Comment to be posted to on first time issues +firstIssueWelcomeComment: > + Thanks for opening your first issue here! We'll come back to you as soon as we can. + + +###### IssueLink Adder ################################################################################################# +# Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. +#insertIssueLinkInPrDescription: +# # specify the placeholder for the issue link that should be present in the description +# descriptionIssuePlaceholderRegexp: "^Issue link: (.*)$" +# matchers: +# # you can have several matches - for different types of issues +# # only the first matching entry is replaced +# jiraIssueMatch: +# # specify the regexp of issue id that you can find in the title of the PR +# # the match groups can be used to build the issue id (${1}, ${2}, etc.). +# titleIssueIdRegexp: \[(AIRFLOW-[0-9]{4})\] +# # the issue link to be added. ${1}, ${2} ... are replaced with the match groups from the +# # title match (remember to use quotes) +# descriptionIssueLink: "[${1}](https://issues.apache.org/jira/browse/${1}/)" +# docOnlyIssueMatch: +# titleIssueIdRegexp: \[(AIRFLOW-X{4})\] +# descriptionIssueLink: "`Document only change, no JIRA issue`" + +###### Title Validator ################################################################################################# +# Verifies if commit/PR titles match the regexp specified +#verifyTitles: +# # Regular expression that should be matched by titles of commits or PR +# titleRegexp: ^\[AIRFLOW-[0-9]{4}\].*$|^\[AIRFLOW-XXXX\].*$ +# # If set to true, it will always check the PR title (as opposed to the individual commits). +# alwaysUsePrTitle: true +# # If set to true, it will only check the commit in case there is a single commit. +# # In case of multiple commits it will check PR title. 
+# # This reflects the standard behaviour of Github that for `Squash & Merge` GitHub +# # uses the PR title rather than commit messages for the squashed commit ¯\_(ツ)_/¯ +# # For single-commit PRs it takes the squashed commit message from the commit as expected. +# # +# # If set to false it will check all commit messages. This is useful when you do not squash commits at merge. +# validateEitherPrOrSingleCommitTitle: true +# # The title the GitHub status should appear from. +# statusTitle: "Title Validator" +# # A custom message to be displayed when the title passes validation. +# successMessage: "Validation successful!" +# # A custom message to be displayed when the title fails validation. +# # Allows insertion of ${type} (commit/PR), ${title} (the title validated) and ${regex} (the titleRegexp above). +# failureMessage: "Wrong ${type} title: ${title}" + +###### PR/Branch Up-To-Date Checker #################################################################################### +# Check if the branch is up to date with develop when certain files are modified +#checkUpToDate: +# # The default branch is "develop", change the branch if you want to check against a different target branch +# targetBranch: develop +# files: +# # File paths that you want to check for +# # In this example, it checks if the branch is up to date when alembic migrations are modified in the PR. +# # It helps avoid multiple heads in alembic migrations in a collaborative development project. +# - airflow/migrations/* +# - airflow/migrations/**/* +# - airflow/alembic.ini From 995c56ee6f72be117300c20bce6c767f9d81f78c Mon Sep 17 00:00:00 2001 From: Ran Isenberg <60175085+risenberg-cyberark@users.noreply.github.com> Date: Fri, 28 May 2021 15:48:00 +0300 Subject: [PATCH 02/35] feat(parser): add support for API Gateway HTTP API #434 (#441) Co-authored-by: Heitor Lessa Co-authored-by: heitorlessa --- .../utilities/parser/envelopes/__init__.py | 2 + .../utilities/parser/envelopes/apigwv2.py | 32 +++++++ .../utilities/parser/models/__init__.py | 16 ++++ .../utilities/parser/models/apigwv2.py | 71 ++++++++++++++ docs/utilities/parser.md | 23 ++--- tests/events/apiGatewayProxyV2Event.json | 4 +- tests/events/apiGatewayProxyV2IamEvent.json | 8 +- ...piGatewayProxyV2LambdaAuthorizerEvent.json | 4 +- tests/functional/parser/test_apigwv2.py | 92 +++++++++++++++++++ tests/functional/test_data_classes.py | 2 +- 10 files changed, 235 insertions(+), 19 deletions(-) create mode 100644 aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py create mode 100644 aws_lambda_powertools/utilities/parser/models/apigwv2.py create mode 100644 tests/functional/parser/test_apigwv2.py diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py index e6f63c4792d..1b118d28117 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py @@ -1,4 +1,5 @@ from .apigw import ApiGatewayEnvelope +from .apigwv2 import ApiGatewayV2Envelope from .base import BaseEnvelope from .cloudwatch import CloudWatchLogsEnvelope from .dynamodb import DynamoDBStreamEnvelope @@ -9,6 +10,7 @@ __all__ = [ "ApiGatewayEnvelope", + "ApiGatewayV2Envelope", "CloudWatchLogsEnvelope", "DynamoDBStreamEnvelope", "EventBridgeEnvelope", diff --git a/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py new file mode 100644 index 00000000000..a627e4da0e5 --- /dev/null 
+++ b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py @@ -0,0 +1,32 @@ +import logging +from typing import Any, Dict, Optional, Type, Union + +from ..models import APIGatewayProxyEventV2Model +from ..types import Model +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class ApiGatewayV2Envelope(BaseEnvelope): + """API Gateway V2 envelope to extract data within body key""" + + def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Optional[Model]: + """Parses data found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : Type[Model] + Data model provided to parse after extracting data using envelope + + Returns + ------- + Any + Parsed detail payload with model provided + """ + logger.debug(f"Parsing incoming data with Api Gateway model V2 {APIGatewayProxyEventV2Model}") + parsed_envelope = APIGatewayProxyEventV2Model.parse_obj(data) + logger.debug(f"Parsing event payload in `detail` with {model}") + return self._parse(data=parsed_envelope.body, model=model) diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 0e59b2197a8..e3fb50a2d5d 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -5,6 +5,15 @@ APIGatewayEventRequestContext, APIGatewayProxyEventModel, ) +from .apigwv2 import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, + RequestContextV2AuthorizerIam, + RequestContextV2AuthorizerIamCognito, + RequestContextV2AuthorizerJwt, + RequestContextV2Http, +) from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel from .event_bridge import EventBridgeModel @@ -35,6 +44,13 @@ from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel __all__ = [ + "APIGatewayProxyEventV2Model", + "RequestContextV2", + "RequestContextV2Http", + "RequestContextV2Authorizer", + "RequestContextV2AuthorizerJwt", + "RequestContextV2AuthorizerIam", + "RequestContextV2AuthorizerIamCognito", "CloudWatchLogsData", "CloudWatchLogsDecode", "CloudWatchLogsLogEvent", diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py new file mode 100644 index 00000000000..4243315bb21 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -0,0 +1,71 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, Field +from pydantic.networks import IPvAnyNetwork + +from ..types import Literal + + +class RequestContextV2AuthorizerIamCognito(BaseModel): + amr: List[str] + identityId: str + identityPoolId: str + + +class RequestContextV2AuthorizerIam(BaseModel): + accessKey: Optional[str] + accountId: Optional[str] + callerId: Optional[str] + principalOrgId: Optional[str] + userArn: Optional[str] + userId: Optional[str] + cognitoIdentity: RequestContextV2AuthorizerIamCognito + + +class RequestContextV2AuthorizerJwt(BaseModel): + claims: Dict[str, Any] + scopes: List[str] + + +class RequestContextV2Authorizer(BaseModel): + jwt: Optional[RequestContextV2AuthorizerJwt] + iam: Optional[RequestContextV2AuthorizerIam] + lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda") + + +class 
RequestContextV2Http(BaseModel): + method: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + path: str + protocol: str + sourceIp: IPvAnyNetwork + userAgent: str + + +class RequestContextV2(BaseModel): + accountId: str + apiId: str + authorizer: Optional[RequestContextV2Authorizer] + domainName: str + domainPrefix: str + requestId: str + routeKey: str + stage: str + time: str + timeEpoch: datetime + http: RequestContextV2Http + + +class APIGatewayProxyEventV2Model(BaseModel): + version: str + routeKey: str + rawPath: str + rawQueryString: str + cookies: Optional[List[str]] + headers: Dict[str, str] + queryStringParameters: Dict[str, str] + pathParameters: Optional[Dict[str, str]] + stageVariables: Optional[Dict[str, str]] + requestContext: RequestContextV2 + body: str + isBase64Encoded: bool diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 83fca6b6741..11dbaca48a8 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -162,6 +162,7 @@ Parser comes with the following built-in models: | **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | | **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | | **APIGatewayProxyEvent** | Lambda Event Source payload for Amazon API Gateway | +| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload | ### extending built-in models @@ -295,17 +296,17 @@ Here's an example of parsing a model found in an event coming from EventBridge, Parser comes with the following built-in envelopes, where `Model` in the return section is your given model. -| Envelope name | Behaviour | Return | -| -------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | -| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | -| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | -| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` | -| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` | -| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` | -| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` | - +| Envelope name | Behaviour | Return | +| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | +| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | +| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | +| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | +| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` | +| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` | +| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | +| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` | +| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` | +| **ApiGatewayV2Envelope** | 1. Parses data using `APIGatewayProxyEventV2Model`.
2. Parses `body` key using your model and returns it. | `Model` | ### Bringing your own envelope You can create your own Envelope model and logic by inheriting from `BaseEnvelope`, and implementing the `parse` method. diff --git a/tests/events/apiGatewayProxyV2Event.json b/tests/events/apiGatewayProxyV2Event.json index 4d0cfdf5703..5e001934fee 100644 --- a/tests/events/apiGatewayProxyV2Event.json +++ b/tests/events/apiGatewayProxyV2Event.json @@ -36,7 +36,7 @@ "method": "POST", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" }, "requestId": "id", @@ -54,4 +54,4 @@ "stageVariable1": "value1", "stageVariable2": "value2" } -} +} \ No newline at end of file diff --git a/tests/events/apiGatewayProxyV2IamEvent.json b/tests/events/apiGatewayProxyV2IamEvent.json index 73d50d78a4a..43f33e1678d 100644 --- a/tests/events/apiGatewayProxyV2IamEvent.json +++ b/tests/events/apiGatewayProxyV2IamEvent.json @@ -29,7 +29,9 @@ "accountId": "1234567890", "callerId": "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials", "cognitoIdentity": { - "amr" : ["foo"], + "amr": [ + "foo" + ], "identityId": "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce", "identityPoolId": "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" }, @@ -47,7 +49,7 @@ "method": "GET", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" } }, @@ -57,4 +59,4 @@ }, "body": "{\r\n\t\"a\": 1\r\n}", "isBase64Encoded": false -} +} \ No newline at end of file diff --git a/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json b/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json index 75d1574f854..cae3130de80 100644 --- a/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json +++ b/tests/events/apiGatewayProxyV2LambdaAuthorizerEvent.json @@ -37,7 +37,7 @@ "method": "GET", "path": "/my/path", "protocol": "HTTP/1.1", - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "userAgent": "agent" } }, @@ -47,4 +47,4 @@ }, "body": "{\r\n\t\"a\": 1\r\n}", "isBase64Encoded": false -} +} \ No newline at end of file diff --git a/tests/functional/parser/test_apigwv2.py b/tests/functional/parser/test_apigwv2.py new file mode 100644 index 00000000000..ee6a4790cd4 --- /dev/null +++ b/tests/functional/parser/test_apigwv2.py @@ -0,0 +1,92 @@ +from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser.models import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyApiGatewayBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) +def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): + assert event.message == "Hello" + assert event.username == "Ran" + + +@event_parser(model=APIGatewayProxyEventV2Model) +def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): + return event + + +def test_apigw_v2_event_with_envelope(): + event = load_event("apiGatewayProxyV2Event.json") + event["body"] = '{"message": "Hello", "username": "Ran"}' + handle_apigw_with_envelope(event, LambdaContext()) + + +def test_apigw_v2_event_jwt_authorizer(): + event = load_event("apiGatewayProxyV2Event.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + assert parsed_event.version == 
event["version"] + assert parsed_event.routeKey == event["routeKey"] + assert parsed_event.rawPath == event["rawPath"] + assert parsed_event.rawQueryString == event["rawQueryString"] + assert parsed_event.cookies == event["cookies"] + assert parsed_event.cookies[0] == "cookie1" + assert parsed_event.headers == event["headers"] + assert parsed_event.queryStringParameters == event["queryStringParameters"] + assert parsed_event.queryStringParameters["parameter2"] == "value" + + request_context = parsed_event.requestContext + assert request_context.accountId == event["requestContext"]["accountId"] + assert request_context.apiId == event["requestContext"]["apiId"] + assert request_context.authorizer.jwt.claims == event["requestContext"]["authorizer"]["jwt"]["claims"] + assert request_context.authorizer.jwt.scopes == event["requestContext"]["authorizer"]["jwt"]["scopes"] + assert request_context.domainName == event["requestContext"]["domainName"] + assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] + + http = request_context.http + assert http.method == "POST" + assert http.path == "/my/path" + assert http.protocol == "HTTP/1.1" + assert str(http.sourceIp) == "192.168.0.1/32" + assert http.userAgent == "agent" + + assert request_context.requestId == event["requestContext"]["requestId"] + assert request_context.routeKey == event["requestContext"]["routeKey"] + assert request_context.stage == event["requestContext"]["stage"] + assert request_context.time == event["requestContext"]["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == event["requestContext"]["timeEpoch"] + assert parsed_event.body == event["body"] + assert parsed_event.pathParameters == event["pathParameters"] + assert parsed_event.isBase64Encoded == event["isBase64Encoded"] + assert parsed_event.stageVariables == event["stageVariables"] + + +def test_api_gateway_proxy_v2_event_lambda_authorizer(): + event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + request_context: RequestContextV2 = parsed_event.requestContext + assert request_context is not None + lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value + assert lambda_props is not None + assert lambda_props["key"] == "value" + + +def test_api_gateway_proxy_v2_event_iam_authorizer(): + event = load_event("apiGatewayProxyV2IamEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) + iam = parsed_event.requestContext.authorizer.iam + assert iam is not None + assert iam.accessKey == "ARIA2ZJZYVUEREEIHAKY" + assert iam.accountId == "1234567890" + assert iam.callerId == "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials" + assert iam.cognitoIdentity.amr == ["foo"] + assert iam.cognitoIdentity.identityId == "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce" + assert iam.cognitoIdentity.identityPoolId == "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" + assert iam.principalOrgId == "AwsOrgId" + assert iam.userArn == "arn:aws:iam::1234567890:user/Admin" + assert iam.userId == "AROA2ZJZYVRE7Y3TUXHH6" diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index f56d0700e6f..07648f84ee9 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -743,7 +743,7 @@ def test_api_gateway_proxy_v2_event(): assert http.method == "POST" assert http.path == "/my/path" 
assert http.protocol == "HTTP/1.1" - assert http.source_ip == "IP" + assert http.source_ip == "192.168.0.1/32" assert http.user_agent == "agent" assert request_context.request_id == event["requestContext"]["requestId"] From b2b3e42051f21402d056bbd39de5ece62ee3b952 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 28 May 2021 15:49:21 +0200 Subject: [PATCH 03/35] chore: assited changelog pre-generation, auto-label PR (#443) --- .chglog/CHANGELOG.tpl.md | 56 +++++++++++++++++++++++++++++++++++ .chglog/config.yml | 32 ++++++++++++++++++++ .github/auto-label.json | 10 ------- .github/boring-cyborg.yml | 49 ++++++++++++++++++++++++------ .github/workflows/publish.yml | 2 +- .gitignore | 1 + Makefile | 4 +++ 7 files changed, 134 insertions(+), 20 deletions(-) create mode 100755 .chglog/CHANGELOG.tpl.md create mode 100755 .chglog/config.yml delete mode 100644 .github/auto-label.json diff --git a/.chglog/CHANGELOG.tpl.md b/.chglog/CHANGELOG.tpl.md new file mode 100755 index 00000000000..c2b5a55cdb4 --- /dev/null +++ b/.chglog/CHANGELOG.tpl.md @@ -0,0 +1,56 @@ +{{ if .Versions -}} + +## [Unreleased] + +{{ if .Unreleased.CommitGroups -}} +{{ range .Unreleased.CommitGroups -}} +### {{ .Title }} +{{ range .Commits -}} +- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} +{{ end }} +{{ end -}} +{{ end -}} +{{ end -}} + +{{ range .Versions }} + +## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }} +{{ range .CommitGroups -}} +### {{ .Title }} +{{ range .Commits -}} +- {{ if .Scope }}**{{ upperFirst .Scope }}:** {{ end }}{{ .Subject }} +{{ end }} +{{ end -}} + +{{- if .RevertCommits -}} +### Reverts +{{ range .RevertCommits -}} +- {{ .Revert.Header }} +{{ end }} +{{ end -}} + +{{- if .MergeCommits -}} +### Pull Requests +{{ range .MergeCommits -}} +- {{ .Header }} +{{ end }} +{{ end -}} + +{{- if .NoteGroups -}} +{{ range .NoteGroups -}} +### {{ .Title }} +{{ range .Notes }} +{{ .Body }} +{{ end }} +{{ end -}} +{{ end -}} +{{ end -}} + +{{- if .Versions }} +[Unreleased]: {{ .Info.RepositoryURL }}/compare/{{ $latest := index .Versions 0 }}{{ $latest.Tag.Name }}...HEAD +{{ range .Versions -}} +{{ if .Tag.Previous -}} +[{{ .Tag.Name }}]: {{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }} +{{ end -}} +{{ end -}} +{{ end -}} diff --git a/.chglog/config.yml b/.chglog/config.yml new file mode 100755 index 00000000000..294b3289981 --- /dev/null +++ b/.chglog/config.yml @@ -0,0 +1,32 @@ +style: github +template: CHANGELOG.tpl.md +info: + title: CHANGELOG + repository_url: https://github.com/awslabs/aws-lambda-powertools-python +options: + commits: + filters: + Type: + - feat + - fix + - perf + - refactor + - docs + - chore + commit_groups: + title_maps: + feat: Features + fix: Bug Fixes + perf: Performance Improvements + refactor: Code Refactoring + docs: Documentation + chore: Project maintenance + header: + pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$" + pattern_maps: + - Type + - Scope + - Subject + notes: + keywords: + - BREAKING CHANGE diff --git a/.github/auto-label.json b/.github/auto-label.json deleted file mode 100644 index 1218790bdc4..00000000000 --- a/.github/auto-label.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "rules": { - "area/utilities": ["aws_lambda_powertools_python/utilities/", "aws_lambda_powertools_python/utilities/", "aws_lambda_powertools_python/middleware_factory/"], - "area/logger": ["aws_lambda_powertools_python/logging/"], - "area/tracer": 
["aws_lambda_powertools_python/tracing/"], - "area/metrics": ["aws_lambda_powertools_python/metrics/"], - "documentation": ["docs/", "mkdocs.yml"], - "internal": ["Makefile", "CHANGELOG.md", "CONTRIBUTING.md"] - } -} diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 89d93b17e37..f79494d01aa 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -1,21 +1,52 @@ ##### Labeler ########################################################################################################## labelPRBasedOnFilePath: area/utilities: + - aws_lambda_powertools/utilities/* - aws_lambda_powertools/utilities/**/* + - aws_lambda_powertools_python/middleware_factory/* + - aws_lambda_powertools_python/middleware_factory/**/* + area/logger: + - aws_lambda_powertools_python/logging/* + - aws_lambda_powertools_python/logging/**/* + - aws_lambda_powertools_python/package_logger.py + area/tracer: + - aws_lambda_powertools_python/tracing/* + - aws_lambda_powertools_python/tracing/**/* + area/metrics: + - aws_lambda_powertools_python/metrics/* + - aws_lambda_powertools_python/metrics/**/* + area/event_handlers: + - aws_lambda_powertools_python/event_handler/* + - aws_lambda_powertools_python/event_handler/**/* - # Add 'label2' to any file changes within 'example2' folder documentation: - docs/* - docs/**/* + - mkdocs.yml -# # Complex: Add 'area/core' label to any change within the 'core' package -# area/core: -# - src/core/* -# - src/core/**/* -# -# # Add 'test' label to any change to *.spec.js files within the source dir -# test: -# - src/**/*.spec.js + internal: + - .github/* + - .github/**/* + - .chglog/* + - .flake8 + - .gitignore + - .pre-commit-config.yaml + - Makefile + - CONTRIBUTING.md + - CODE_OF_CONDUCT.md + - LICENSE + - aws_lambda_powertools_python/shared/* + - aws_lambda_powertools_python/shared/** + + dependencies: + - pyproject.toml + - poetry.lock + + tests: + - tests/* + - tests/**/* + - benchmark/* + - benchmark/**/* ##### Greetings ######################################################################################################## firstPRWelcomeComment: > diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 246992ec244..42eeae4b0f0 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -4,7 +4,7 @@ name: Publish to PyPi # # === Manual activities === # -# 1. Document human readable changes in CHANGELOG +# 1. Document human readable changes in CHANGELOG (pre-generate unreleased changes with `make changelog`) # 2. Bump package version using poetry version # 3. Merge version changes to develop branch # 4. Edit the current draft release notes diff --git a/.gitignore b/.gitignore index ace70c8192d..ce5e32bd3e1 100644 --- a/.gitignore +++ b/.gitignore @@ -295,6 +295,7 @@ test_report wheelhouse /.idea/* *.html +TMP_CHANGELOG.md # Docs files docs/.cache/ diff --git a/Makefile b/Makefile index b0a075d39a2..018f9c9420d 100644 --- a/Makefile +++ b/Makefile @@ -79,3 +79,7 @@ release: pr poetry build $(MAKE) release-test $(MAKE) release-prod + +changelog: + @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" + docker run -v ${PWD}:/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. 
-o TMP_CHANGELOG.md From fd34f0ce43b2d002a85c5845fff2b956a23f75b3 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sat, 29 May 2021 19:07:52 +0200 Subject: [PATCH 04/35] chore: enable dependabot for dep upgrades (#444) --- .github/auto-label_yml.inactive | 36 --------------------------------- .github/dependabot.yml | 13 ++++++++++++ 2 files changed, 13 insertions(+), 36 deletions(-) delete mode 100644 .github/auto-label_yml.inactive create mode 100644 .github/dependabot.yml diff --git a/.github/auto-label_yml.inactive b/.github/auto-label_yml.inactive deleted file mode 100644 index 220587fab76..00000000000 --- a/.github/auto-label_yml.inactive +++ /dev/null @@ -1,36 +0,0 @@ -# NOTE to self: Reason it doesn't work it's due to Org restrictions and how GitHub exposes token to forks - -name: PR Auto Label -on: [pull_request] -#on: -# pull_request: -# types: [opened, synchronize] -# types: [pull_request] - -jobs: -# auto-label: -# name: PR Auto Label -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v2 -# - uses: banyan/auto-label@1.2 -# env: -# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - size-label: - name: PR diff size label - runs-on: ubuntu-latest - steps: - - name: size-label - uses: codelytv/pr-size-labeler@v1 - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - xs_max_size: '10' - s_max_size: '100' - m_max_size: '500' - l_max_size: '1000' - fail_if_xl: 'false' - message_if_xl: > - 'This PR exceeds the recommended size of 1000 lines. - Please make sure you are NOT addressing multiple issues with one PR. - Note this PR might be rejected due to its size.’ - github_api_url: 'api.github.com' diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..6cd8cd8509d --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + target-branch: "develop" From da47f613e6119d6bb75606395fac6be4b39be03c Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sat, 29 May 2021 21:09:29 +0200 Subject: [PATCH 05/35] chore: enable mergify (#450) --- .github/mergify.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/mergify.yml diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 00000000000..a225925f678 --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,26 @@ +pull_request_rules: + - name: automatic merge for Dependabot pull requests + conditions: + - author~=^dependabot(|-preview)\[bot\]$ + - check-success=Build + actions: + merge: + strict: false + method: squash + commit_message: title+body + + - name: Automatic merge ⬇️ on approval ✔ + conditions: + - base!=master + - "#approved-reviews-by>=1" + - "#changes-requested-reviews-by=0" + - -title~=(WIP|wip) + - check-success=Build + - check-success=Semantic Pull Request + - body~=(?m)^\[X\] Meet tenets criteria + actions: + merge: + strict: smart + method: squash + strict_method: merge + commit_message: title+body From 105be2454aab5331ea3a58f78bcb01864feef7bf Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 29 May 2021 21:22:12 +0200 Subject: [PATCH 06/35] chore: dependabot/mergify guardrail for major versions --- .github/dependabot.yml | 14 ++++++++++++++ .github/mergify.yml | 1 + 2 files changed, 15 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 6cd8cd8509d..347ee2679b5 100644 --- a/.github/dependabot.yml 
+++ b/.github/dependabot.yml @@ -11,3 +11,17 @@ updates: schedule: interval: "daily" target-branch: "develop" + update_types: + - "semver:minor" + - "semver:patch" + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + target-branch: "develop" + update_types: + - "semver:major" + labels: + - "do-not-merge" + - "dependencies" diff --git a/.github/mergify.yml b/.github/mergify.yml index a225925f678..c9a061c592a 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -3,6 +3,7 @@ pull_request_rules: conditions: - author~=^dependabot(|-preview)\[bot\]$ - check-success=Build + - -label~="do-not-merge" actions: merge: strict: false From 4a04d72b719544dcf44c6c42ce6a91df21a41fc4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 29 May 2021 21:33:15 +0200 Subject: [PATCH 07/35] build(deps-dev): bump pytest-asyncio from 0.14.0 to 0.15.1 (#448) Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.14.0 to 0.15.1. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.14.0...v0.15.1) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 45512c39eb1..a12780fb9d5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -789,17 +789,17 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-asyncio" -version = "0.14.0" +version = "0.15.1" description = "Pytest support for asyncio." category = "dev" optional = false -python-versions = ">= 3.5" +python-versions = ">= 3.6" [package.dependencies] pytest = ">=5.4.0" [package.extras] -testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] +testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" @@ -1064,7 +1064,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "86a0de7bd25e0ebdf9a7eb445eca6def21bb7365f4acc12e0512e233794928c9" +content-hash = "0bfd91e6e51d456c28a06552f5070c9f6c345485552d381a66a875372b7fd60b" [metadata.files] appdirs = [ @@ -1428,8 +1428,8 @@ pytest = [ {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"}, - {file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"}, + {file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, + {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] pytest-cov = [ {file = "pytest-cov-2.12.0.tar.gz", hash = "sha256:8535764137fecce504a49c2b742288e3d34bc09eed298ad65963616cc98fd45e"}, diff --git a/pyproject.toml b/pyproject.toml index 622387eb60b..7d527349bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ isort = "^5.8.0" pytest-cov = "^2.11.1" pytest-mock = "^3.5.1" pdoc3 = "^0.9.2" -pytest-asyncio = "^0.14.0" +pytest-asyncio = "^0.15.1" bandit = "^1.7.0" radon = "^4.5.0" xenon = "^0.7.1" From 
e70e48e24c9f52f3b8ef1cb1c901b75c13ac75a7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 29 May 2021 21:55:56 +0200 Subject: [PATCH 08/35] chore: fix dependabot commit messages prefix --- .github/dependabot.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 347ee2679b5..5dddf01a1f3 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,6 +5,9 @@ updates: directory: "/" schedule: interval: "daily" + commit-message: + prefix: chore + include: scope - package-ecosystem: "pip" directory: "/" @@ -14,6 +17,9 @@ updates: update_types: - "semver:minor" - "semver:patch" + commit-message: + prefix: chore + include: scope - package-ecosystem: "pip" directory: "/" @@ -25,3 +31,6 @@ updates: labels: - "do-not-merge" - "dependencies" + commit-message: + prefix: chore + include: scope From ec1e7ab545c8e5820ab29639f4abb73fa09d8433 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 31 May 2021 21:42:56 +0200 Subject: [PATCH 09/35] chore: fix dependabot unique set config --- .github/dependabot.yml | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5dddf01a1f3..e95edbee4e6 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -14,23 +14,20 @@ updates: schedule: interval: "daily" target-branch: "develop" - update_types: - - "semver:minor" - - "semver:patch" commit-message: prefix: chore include: scope - - package-ecosystem: "pip" - directory: "/" - schedule: - interval: "daily" - target-branch: "develop" - update_types: - - "semver:major" - labels: - - "do-not-merge" - - "dependencies" - commit-message: - prefix: chore - include: scope +# - package-ecosystem: "pip" +# directory: "/" +# schedule: +# interval: "daily" +# target-branch: "develop" +# update_types: +# - "semver:major" +# labels: +# - "do-not-merge" +# - "dependencies" +# commit-message: +# prefix: chore +# include: scope From 7d8247831afe2bd5dd5a09aa85ca38e986409a7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 May 2021 21:48:51 +0200 Subject: [PATCH 10/35] chore: bump mkdocs-material from 7.1.5 to 7.1.6 (#451) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.1.5 to 7.1.6. 
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/docs/changelog.md) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/7.1.5...7.1.6) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index a12780fb9d5..39ed1088d30 100644 --- a/poetry.lock +++ b/poetry.lock @@ -591,7 +591,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.1.5" +version = "7.1.6" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -1064,7 +1064,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "0bfd91e6e51d456c28a06552f5070c9f6c345485552d381a66a875372b7fd60b" +content-hash = "e36e8797d8aaeab679641cb6b685f4f83102bd3dc48f1ecebf65b8f97a12f0a8" [metadata.files] appdirs = [ @@ -1341,8 +1341,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.5.tar.gz", hash = "sha256:dc4849948695e22b3385518f6237e88164a201a013c6ca8257734a2df3d0c7c6"}, - {file = "mkdocs_material-7.1.5-py2.py3-none-any.whl", hash = "sha256:d2663b7a0a6ecd435eeb6c2686cd6a1a4e3bb6b6f021464d88a9894d8533e288"}, + {file = "mkdocs-material-7.1.6.tar.gz", hash = "sha256:b3f1aaea3e79e3c3b30babe0238915cf4ad4c4560d404bb0ac3298ee2ce004a3"}, + {file = "mkdocs_material-7.1.6-py2.py3-none-any.whl", hash = "sha256:01566c460990dad54d6ec935553b9c5c8e4e753ac3e30ba0945ceeff4ad164ac"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, diff --git a/pyproject.toml b/pyproject.toml index 7d527349bfb..2006c2c7c01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ radon = "^4.5.0" xenon = "^0.7.1" flake8-eradicate = "^1.0.0" flake8-bugbear = "^21.3.2" -mkdocs-material = "^7.1.0" +mkdocs-material = "^7.1.6" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" From 0e3f4bbba87ec18fc123d0ed4e1cfe7045f041aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 May 2021 21:50:44 +0200 Subject: [PATCH 11/35] chore: bump boto3 from 1.17.78 to 1.17.84 (#449) Bumps [boto3](https://github.com/boto/boto3) from 1.17.78 to 1.17.84. 
- [Release notes](https://github.com/boto/boto3/releases) - [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/boto3/compare/1.17.78...1.17.84) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 39ed1088d30..3399a1144ac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.78" +version = "1.17.84" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.78,<1.21.0" +botocore = ">=1.20.84,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.78" +version = "1.20.84" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1091,12 +1091,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.78-py2.py3-none-any.whl", hash = "sha256:1a87855123df1f18081a5fb8c1abde28d0096a03f6f3ebb06bcfb77cdffdae5e"}, - {file = "boto3-1.17.78.tar.gz", hash = "sha256:2a5caee63d45fbdcc85e710c7f4146112f5d10b22fd0176643d2f2914cce54df"}, + {file = "boto3-1.17.84-py2.py3-none-any.whl", hash = "sha256:1d24c6d1f5db4b52bb29f1dfe13fd3e9d95d9fa4634b0638a096f5a884173cde"}, + {file = "boto3-1.17.84.tar.gz", hash = "sha256:8ee8766813864796be6c87ad762c6da4bfef603977931854a38f49fe4db06495"}, ] botocore = [ - {file = "botocore-1.20.78-py2.py3-none-any.whl", hash = "sha256:37105b9434d73f9c4d4960ee54c8eb129120f4c6681eb16edf483f03c5e2326d"}, - {file = "botocore-1.20.78.tar.gz", hash = "sha256:e74775f9e64e975787d76390fc5ac5aba875d726bb9ece3b7bd900205b430389"}, + {file = "botocore-1.20.84-py2.py3-none-any.whl", hash = "sha256:75e1397b80aa8757a26636b949eebd20b3cf67e8f1ed80dc01170907e06ea45d"}, + {file = "botocore-1.20.84.tar.gz", hash = "sha256:bc59eb748fcb07835613ebea6dcc2600ae1a8be0fae30e40b9c1e81b73262296"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From df574c6f6cc78264006a786ddc55f517c24fa619 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 May 2021 21:52:43 +0200 Subject: [PATCH 12/35] chore: bump xenon from 0.7.1 to 0.7.3 (#446) Bumps [xenon](https://github.com/rubik/xenon) from 0.7.1 to 0.7.3. 
- [Release notes](https://github.com/rubik/xenon/releases) - [Changelog](https://github.com/rubik/xenon/blob/master/CHANGELOG) - [Commits](https://github.com/rubik/xenon/compare/v0.7.1...v0.7.3) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3399a1144ac..200319846f1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1035,7 +1035,7 @@ python-versions = "*" [[package]] name = "xenon" -version = "0.7.1" +version = "0.7.3" description = "Monitor code metrics for Python on your CI server" category = "dev" optional = false @@ -1043,7 +1043,7 @@ python-versions = "*" [package.dependencies] PyYAML = ">=4.2b1,<6.0" -radon = {version = ">=4,<5", extras = ["flake8"]} +radon = ">=4,<5" requests = ">=2.0,<3.0" [[package]] @@ -1064,7 +1064,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "e36e8797d8aaeab679641cb6b685f4f83102bd3dc48f1ecebf65b8f97a12f0a8" +content-hash = "f19bacb5fc43968a9dc90d6725fdb2350aac3a3bde5ea9b3ea6bf63681c43c15" [metadata.files] appdirs = [ @@ -1678,8 +1678,8 @@ wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] xenon = [ - {file = "xenon-0.7.1-py2.py3-none-any.whl", hash = "sha256:33d807ef805a2ed854adfcc7cc998398d5c0626a5ad443e52684b998a4dd4aa3"}, - {file = "xenon-0.7.1.tar.gz", hash = "sha256:38bf283135f0636355ecf6054b6f37226af12faab152161bda1a4f9e4dc5b701"}, + {file = "xenon-0.7.3-py2.py3-none-any.whl", hash = "sha256:a167b4c329fbea7cd84b148007ba92142f46b88ca095488c175dc7a8a8007ee9"}, + {file = "xenon-0.7.3.tar.gz", hash = "sha256:eda949fbf3cfb4851d49d97e961e2b18a6b66fbecaf285dc89230775d2b2a99f"}, ] zipp = [ {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, diff --git a/pyproject.toml b/pyproject.toml index 2006c2c7c01..e4650a4538c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pdoc3 = "^0.9.2" pytest-asyncio = "^0.15.1" bandit = "^1.7.0" radon = "^4.5.0" -xenon = "^0.7.1" +xenon = "^0.7.3" flake8-eradicate = "^1.0.0" flake8-bugbear = "^21.3.2" mkdocs-material = "^7.1.6" From 7e9c7b7694464f01dda5c512264141097695869a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 May 2021 21:53:13 +0200 Subject: [PATCH 13/35] chore: bump actions/setup-python from 1 to 2.2.2 (#445) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 1 to 2.2.2. 
- [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v1...v2.2.2) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/publish.yml | 2 +- .github/workflows/python_build.yml | 2 +- .github/workflows/python_docs.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 42eeae4b0f0..19794cad093 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -35,7 +35,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: "3.8" - name: Set release notes tag diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index ddef9f6c527..6919856e18a 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index dcdd409b835..dceee36b2f9 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -13,7 +13,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v2.2.2 with: python-version: "3.8" - name: Install dependencies From 9b88322991520fcf8c1e7ca7d546ba64a729a49d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 31 May 2021 22:05:57 +0200 Subject: [PATCH 14/35] docs: include new public roadmap (#452) --- README.md | 2 +- mkdocs.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b452dd37d1a..25e7b2e343d 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A suite of Python utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. ([AWS Lambda Powertools Java](https://github.com/awslabs/aws-lambda-powertools-java) is also available). 
-**[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[API Docs](https://awslabs.github.io/aws-lambda-powertools-python/api/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=)** | **[🐛Bug Report](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=bug%2C+triage&template=bug_report.md&title=)** | **[Hello world example](https://github.com/aws-samples/cookiecutter-aws-sam-python)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** +**[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Roadmap](https://github.com/awslabs/aws-lambda-powertools-roadmap/projects/1)** | **[Quick hello world example](https://github.com/aws-samples/cookiecutter-aws-sam-python)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** > **Join us on the AWS Developers Slack at `#lambda-powertools`** - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-gu30gquv-EhwIYq3kHhhysaZ2aIX7ew)** diff --git a/mkdocs.yml b/mkdocs.yml index b07e30386dd..0a761ad9540 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -7,6 +7,7 @@ edit_uri: edit/develop/docs nav: - Homepage: index.md - Changelog: changelog.md + - Roadmap: https://github.com/awslabs/aws-lambda-powertools-roadmap/projects/1" target="_blank - API reference: api/" target="_blank - Core utilities: - core/tracer.md From b68fd07f90155287fede27a4a2e5e76a3448b72e Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Wed, 2 Jun 2021 09:53:11 +0200 Subject: [PATCH 15/35] chore: update mergify to require approval on dependabot (#456) --- .github/mergify.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/mergify.yml b/.github/mergify.yml index c9a061c592a..ccaaadd3480 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -4,6 +4,7 @@ pull_request_rules: - author~=^dependabot(|-preview)\[bot\]$ - check-success=Build - -label~="do-not-merge" + - "#approved-reviews-by>=1" # until we exclude major versions in dependabot actions: merge: strict: false From aa28b9a317bbd75acb6beb24478c77df1d59a11a Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Wed, 2 Jun 2021 10:17:18 +0200 Subject: [PATCH 16/35] chore(mergify): use job name to match GH Actions --- .github/mergify.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/mergify.yml b/.github/mergify.yml index ccaaadd3480..dd2a439e689 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -2,7 +2,7 @@ pull_request_rules: - name: automatic merge for Dependabot pull requests conditions: - author~=^dependabot(|-preview)\[bot\]$ - - check-success=Build + - check-success=build - -label~="do-not-merge" - "#approved-reviews-by>=1" # until we exclude major versions in dependabot actions: From bec91ddc4069e368a3e4f35ae06ca6e79ae77002 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Wed, 2 Jun 2021 11:08:52 +0200 Subject: [PATCH 17/35] chore(mergify): disable check for matrix jobs mergify doesn't seem to support matrix jobs but singular, unless we use regex, so we can test later. 
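(For future reference, a minimal sketch of the regex idea mentioned above — assuming Mergify's `~=` operator can be applied to `check-success` entries and that the matrix jobs are reported with names like `build (3.6)`; both are assumptions, not verified here.)

```yaml
pull_request_rules:
  - name: automatic merge for Dependabot pull requests
    conditions:
      - author~=^dependabot(|-preview)\[bot\]$
      # hypothetical: regex-match matrix jobs whose check name starts with "build"
      - check-success~=^build
      - -label~="do-not-merge"
      - "#approved-reviews-by>=1"
    actions:
      merge:
        strict: false
        method: squash
        commit_message: title+body
```

If `~=` is satisfied by any single matching check, a merge could still proceed while other matrix jobs are failing, so this would need testing before replacing the commented-out `check-success=build` condition.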
--- .github/mergify.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/mergify.yml b/.github/mergify.yml index dd2a439e689..a98b7270199 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -2,7 +2,7 @@ pull_request_rules: - name: automatic merge for Dependabot pull requests conditions: - author~=^dependabot(|-preview)\[bot\]$ - - check-success=build +# - check-success=build # matrix jobs aren't working in mergify - -label~="do-not-merge" - "#approved-reviews-by>=1" # until we exclude major versions in dependabot actions: @@ -17,7 +17,7 @@ pull_request_rules: - "#approved-reviews-by>=1" - "#changes-requested-reviews-by=0" - -title~=(WIP|wip) - - check-success=Build +# - check-success=build # matrix jobs aren't working in mergify - check-success=Semantic Pull Request - body~=(?m)^\[X\] Meet tenets criteria actions: From 6e12012e9f5f33f9270d564a42f0182da4a5e677 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 09:09:27 +0000 Subject: [PATCH 18/35] chore(deps-dev): bump pytest-cov from 2.12.0 to 2.12.1 (#454) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 2.12.0 to 2.12.1.
Changelog excerpt, sourced from pytest-cov's changelog:

2.12.1 (2021-06-01)

  • Changed the `toml` requirement to always be directly required (instead of being required through a coverage extra). This fixes issues with pip-compile ([pip-tools#1300](https://github.com/jazzband/pip-tools/issues/1300)). Contributed by Sorin Sbarnea in [#472](https://github.com/pytest-dev/pytest-cov/pull/472).
  • Documented `show_contexts`. Contributed by Brian Rutledge in [#473](https://github.com/pytest-dev/pytest-cov/pull/473).
--- poetry.lock | 13 +++++++------ pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 200319846f1..bd78854d581 100644 --- a/poetry.lock +++ b/poetry.lock @@ -803,18 +803,19 @@ testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.12.0" +version = "2.12.1" description = "Pytest plugin for measuring coverage." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = ">=5.2.1" pytest = ">=4.6" +toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" @@ -1064,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "f19bacb5fc43968a9dc90d6725fdb2350aac3a3bde5ea9b3ea6bf63681c43c15" +content-hash = "a207f2be8a6e01d19062b6293e455b0b318d45ac96cf562c9fcdf8fcc7cdf5ff" [metadata.files] appdirs = [ @@ -1432,8 +1433,8 @@ pytest-asyncio = [ {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] pytest-cov = [ - {file = "pytest-cov-2.12.0.tar.gz", hash = "sha256:8535764137fecce504a49c2b742288e3d34bc09eed298ad65963616cc98fd45e"}, - {file = "pytest_cov-2.12.0-py2.py3-none-any.whl", hash = "sha256:95d4933dcbbacfa377bb60b29801daa30d90c33981ab2a79e9ab4452c165066e"}, + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-mock = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, diff --git a/pyproject.toml b/pyproject.toml index e4650a4538c..94617837137 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ flake8-fixme = "^1.1.1" flake8-isort = "^4.0.0" flake8-variables-names = "^0.0.4" isort = "^5.8.0" -pytest-cov = "^2.11.1" +pytest-cov = "^2.12.1" pytest-mock = "^3.5.1" pdoc3 = "^0.9.2" pytest-asyncio = "^0.15.1" From 01beab83e17b87e1f85fc055c4e10695a4e7635f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 09:40:42 +0000 Subject: [PATCH 19/35] chore(deps): bump boto3 from 1.17.84 to 1.17.85 (#455) Bumps [boto3](https://github.com/boto/boto3) from 1.17.84 to 1.17.85.
Changelog

Sourced from boto3's changelog.

1.17.85

  • api-change:sns: [botocore] This release adds SMS sandbox in Amazon SNS and the ability to view all configured origination numbers. The SMS sandbox provides a safe environment for sending SMS messages, without risking your reputation as an SMS sender.
  • api-change:polly: [botocore] Amazon Polly adds new Canadian French voice - Gabrielle. Gabrielle is available as Neural voice only.
  • api-change:ec2: [botocore] Added idempotency to CreateNetworkInterface using the ClientToken parameter.
  • api-change:iotwireless: [botocore] Added six new public customer logging APIs to allow customers to set/get/reset log levels at resource type and resource id level. The log level set from the APIs will be used to filter log messages that can be emitted to CloudWatch in customer accounts.
  • api-change:servicediscovery: [botocore] Bugfixes - The DiscoverInstances API operation now provides an option to return all instances for health-checked services when there are no healthy instances available.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.84&new-version=1.17.85)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
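The SMS sandbox entry above is the main functional change in this boto3 bump. As a rough, hypothetical sketch (not taken from this patch series or repository), checking the sandbox status and listing origination numbers with the updated SDK could look like the following; the two client calls are the SNS operations named in the changelog, while the printed response fields are assumptions about the API's output shape.

```python
import boto3

# Rough sketch of the SNS SMS sandbox APIs referenced in the 1.17.85
# changelog entry above; printed fields are assumed response keys.
sns = boto3.client("sns")

# Check whether the account is still restricted to the SMS sandbox
status = sns.get_sms_sandbox_account_status()
print("In SMS sandbox:", status.get("IsInSandbox"))

# View the origination numbers configured for the account
response = sns.list_origination_numbers()
for number in response.get("PhoneNumbers", []):
    print(number.get("PhoneNumber"), number.get("Status"))
```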
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index bd78854d581..60244430f3b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.84" +version = "1.17.85" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.84,<1.21.0" +botocore = ">=1.20.85,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.84" +version = "1.20.85" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1092,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.84-py2.py3-none-any.whl", hash = "sha256:1d24c6d1f5db4b52bb29f1dfe13fd3e9d95d9fa4634b0638a096f5a884173cde"}, - {file = "boto3-1.17.84.tar.gz", hash = "sha256:8ee8766813864796be6c87ad762c6da4bfef603977931854a38f49fe4db06495"}, + {file = "boto3-1.17.85-py2.py3-none-any.whl", hash = "sha256:ee82c1a97de02bd4e295b8cad440093d57869892ee5b8941a851758c45944cd5"}, + {file = "boto3-1.17.85.tar.gz", hash = "sha256:8352dffe768af9e1471323c8e443cc66a114891572bf832bec3cc2eec47838f6"}, ] botocore = [ - {file = "botocore-1.20.84-py2.py3-none-any.whl", hash = "sha256:75e1397b80aa8757a26636b949eebd20b3cf67e8f1ed80dc01170907e06ea45d"}, - {file = "botocore-1.20.84.tar.gz", hash = "sha256:bc59eb748fcb07835613ebea6dcc2600ae1a8be0fae30e40b9c1e81b73262296"}, + {file = "botocore-1.20.85-py2.py3-none-any.whl", hash = "sha256:7f54fa67b45cf767e1e4045741674cfdc47a3f424fe6f37570ae3ff1ca1e1e2a"}, + {file = "botocore-1.20.85.tar.gz", hash = "sha256:d8992096d9c04e7be331924a59677e591cce6a3c6bd3a4c8fe26b00700d5255a"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From 957ec510b840545c0fb02d178ab3fc3c11659280 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Jun 2021 07:34:17 +0000 Subject: [PATCH 20/35] chore(deps): bump boto3 from 1.17.85 to 1.17.86 (#458) Bumps [boto3](https://github.com/boto/boto3) from 1.17.85 to 1.17.86.
Changelog

Sourced from boto3's changelog.

1.17.86

  • api-change:autoscaling: [botocore] You can now launch EC2 instances with GP3 volumes when using Auto Scaling groups with Launch Configurations
  • api-change:lightsail: [botocore] Documentation updates for Lightsail
  • api-change:ecs: [botocore] Documentation updates for Amazon ECS.
  • api-change:docdb: [botocore] This SDK release adds support for DocDB global clusters.
  • api-change:iam: [botocore] Documentation updates for AWS Identity and Access Management (IAM).
  • api-change:braket: [botocore] Introduction of a RETIRED status for devices.
Commits
  • dde9752 Merge branch 'release-1.17.86'
  • 8e00fba Bumping version to 1.17.86
  • c3ca1fe Add changelog entries from botocore
  • 66f4ea6 Merge branch 'release-1.17.85' into develop
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.85&new-version=1.17.86)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
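Of the service updates listed above, the Auto Scaling gp3 support is the one most likely to show up in code rather than documentation. A minimal, hypothetical sketch (not part of this patch series) of creating a launch configuration with a gp3 root volume follows; the launch configuration name, AMI ID, device name, and volume size are placeholder assumptions.

```python
import boto3

# Hypothetical sketch of the Auto Scaling gp3 support noted in the 1.17.86
# changelog above; names, AMI ID, and sizes are placeholders, not real values.
autoscaling = boto3.client("autoscaling")

autoscaling.create_launch_configuration(
    LaunchConfigurationName="example-gp3-launch-config",  # placeholder name
    ImageId="ami-0123456789abcdef0",                      # placeholder AMI
    InstanceType="t3.micro",
    BlockDeviceMappings=[
        {
            "DeviceName": "/dev/xvda",  # assumed root device name
            "Ebs": {
                "VolumeSize": 20,
                "VolumeType": "gp3",  # the newly supported volume type
            },
        }
    ],
)
```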
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 60244430f3b..9f9003746d5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.85" +version = "1.17.86" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.85,<1.21.0" +botocore = ">=1.20.86,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.85" +version = "1.20.86" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1092,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.85-py2.py3-none-any.whl", hash = "sha256:ee82c1a97de02bd4e295b8cad440093d57869892ee5b8941a851758c45944cd5"}, - {file = "boto3-1.17.85.tar.gz", hash = "sha256:8352dffe768af9e1471323c8e443cc66a114891572bf832bec3cc2eec47838f6"}, + {file = "boto3-1.17.86-py2.py3-none-any.whl", hash = "sha256:85c1875ab17c36ffb3ad1b0f4b52e3418e0fd1ef7d167ff4e545fb052b7cc1f8"}, + {file = "boto3-1.17.86.tar.gz", hash = "sha256:4f15867701b28ca78eb56cfc4ff5f0e5ee7db42558dbc445f1a7395e467ac3e9"}, ] botocore = [ - {file = "botocore-1.20.85-py2.py3-none-any.whl", hash = "sha256:7f54fa67b45cf767e1e4045741674cfdc47a3f424fe6f37570ae3ff1ca1e1e2a"}, - {file = "botocore-1.20.85.tar.gz", hash = "sha256:d8992096d9c04e7be331924a59677e591cce6a3c6bd3a4c8fe26b00700d5255a"}, + {file = "botocore-1.20.86-py2.py3-none-any.whl", hash = "sha256:2a48154fd7d61a67d861b0781e204918508aa8af094391f03ad4047c979dc9c7"}, + {file = "botocore-1.20.86.tar.gz", hash = "sha256:bbdfd2adedb0cc9117cf411ef51cbd8fc19798e21e414f831ad9781e507ff1da"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From dc80b35c8c6ec8e24eae89db543ed3099fbf2462 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 4 Jun 2021 12:00:58 +0200 Subject: [PATCH 21/35] docs(idempotency): remove old todo --- docs/utilities/idempotency.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index bd9a8f8e98b..a684695b36c 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -85,8 +85,6 @@ TTL attribute name | `expiration` | This can only be configured after your table see 1WCU and 1RCU. Review the [DynamoDB pricing documentation](https://aws.amazon.com/dynamodb/pricing/) to estimate the cost. -!!! danger "CREATE SECTION FOR PERSISTENCE LAYERS" - ### Idempotent decorator You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. From 31dc88c86f3f7313b2adfdebdb419a31011cf50c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Jun 2021 10:22:25 +0000 Subject: [PATCH 22/35] chore(deps): bump boto3 from 1.17.86 to 1.17.87 (#459) Bumps [boto3](https://github.com/boto/boto3) from 1.17.86 to 1.17.87.
Changelog

Sourced from boto3's changelog.

1.17.87

  • api-change:s3: [botocore] S3 Inventory now supports Bucket Key Status
  • api-change:s3control: [botocore] Amazon S3 Batch Operations now supports S3 Bucket Keys.
  • api-change:route53resolver: [botocore] Documentation updates for Route 53 Resolver
  • api-change:ssm: [botocore] Documentation updates for ssm to fix customer reported issue
  • api-change:forecast: [botocore] Added optional field AutoMLOverrideStrategy to CreatePredictor API that allows users to customize AutoML strategy. If provided in CreatePredictor request, this field is visible in DescribePredictor and GetAccuracyMetrics responses.
Commits
  • 07652b4 Merge branch 'release-1.17.87'
  • 7ead60b Bumping version to 1.17.87
  • 23d42a1 Add changelog entries from botocore
  • 468fc14 Merge branch 'release-1.17.86' into develop
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.86&new-version=1.17.87)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9f9003746d5..fe8d75b0905 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.86" +version = "1.17.87" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.86,<1.21.0" +botocore = ">=1.20.87,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.86" +version = "1.20.87" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1092,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.86-py2.py3-none-any.whl", hash = "sha256:85c1875ab17c36ffb3ad1b0f4b52e3418e0fd1ef7d167ff4e545fb052b7cc1f8"}, - {file = "boto3-1.17.86.tar.gz", hash = "sha256:4f15867701b28ca78eb56cfc4ff5f0e5ee7db42558dbc445f1a7395e467ac3e9"}, + {file = "boto3-1.17.87-py2.py3-none-any.whl", hash = "sha256:52025e0af7935cb7036037978de190e41ad7f6716d1de1e3669976e99d084edf"}, + {file = "boto3-1.17.87.tar.gz", hash = "sha256:612aa5dc27b87ae1dc695e194f97af7da0fcc9e97aa80d9740732d78ba117119"}, ] botocore = [ - {file = "botocore-1.20.86-py2.py3-none-any.whl", hash = "sha256:2a48154fd7d61a67d861b0781e204918508aa8af094391f03ad4047c979dc9c7"}, - {file = "botocore-1.20.86.tar.gz", hash = "sha256:bbdfd2adedb0cc9117cf411ef51cbd8fc19798e21e414f831ad9781e507ff1da"}, + {file = "botocore-1.20.87-py2.py3-none-any.whl", hash = "sha256:3dcc84855349073e0cb706e90a9e1180899deded4b8555698fb4a5a5b3357202"}, + {file = "botocore-1.20.87.tar.gz", hash = "sha256:04a5594ae1886233cb15ab636b51aeecf6b5412231f72744405f11a54a8cda58"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From 675cc243f9dcf6f660bb92072e1a83f571dc5764 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Sat, 5 Jun 2021 08:50:37 -0700 Subject: [PATCH 23/35] feat(data-classes): decorator to instantiate data_classes and docs updates (#442) --- .../utilities/data_classes/__init__.py | 2 + .../utilities/data_classes/alb_event.py | 2 + .../utilities/data_classes/event_source.py | 39 ++ .../utilities/idempotency/persistence/base.py | 1 + docs/utilities/data_classes.md | 456 +++++++++--------- .../idempotency/test_idempotency.py | 43 +- tests/functional/test_data_classes.py | 13 + 7 files changed, 339 insertions(+), 217 deletions(-) create mode 100644 aws_lambda_powertools/utilities/data_classes/event_source.py diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index a47c32ee07f..c5391880122 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -10,6 +10,7 @@ from .connect_contact_flow_event import ConnectContactFlowEvent from .dynamo_db_stream_event import DynamoDBStreamEvent from .event_bridge_event import EventBridgeEvent +from .event_source import event_source from .kinesis_stream_event import KinesisStreamEvent from .s3_event import S3Event from .ses_event import SESEvent @@ -31,4 +32,5 @@ "SESEvent", "SNSEvent", "SQSEvent", + "event_source", ] diff --git 
a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 73e064d0f26..159779c86a7 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -6,6 +6,7 @@ class ALBEventRequestContext(DictWrapper): @property def elb_target_group_arn(self) -> str: + """Target group arn for your Lambda function""" return self["requestContext"]["elb"]["targetGroupArn"] @@ -15,6 +16,7 @@ class ALBEvent(BaseProxyEvent): Documentation: -------------- - https://docs.aws.amazon.com/lambda/latest/dg/services-alb.html + - https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html """ @property diff --git a/aws_lambda_powertools/utilities/data_classes/event_source.py b/aws_lambda_powertools/utilities/data_classes/event_source.py new file mode 100644 index 00000000000..3968f923573 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/event_source.py @@ -0,0 +1,39 @@ +from typing import Any, Callable, Dict, Type + +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@lambda_handler_decorator +def event_source( + handler: Callable[[Any, LambdaContext], Any], + event: Dict[str, Any], + context: LambdaContext, + data_class: Type[DictWrapper], +): + """Middleware to create an instance of the passed in event source data class + + Parameters + ---------- + handler: Callable + Lambda's handler + event: Dict + Lambda's Event + context: Dict + Lambda's Context + data_class: Type[DictWrapper] + Data class type to instantiate + + Example + -------- + + **Sample usage** + + from aws_lambda_powertools.utilities.data_classes import S3Event, event_source + + @event_source(data_class=S3Event) + def handler(event: S3Event, context): + return {"key": event.object_key} + """ + return handler(data_class(event), context) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 0cbd34213c1..31aef6dc0f2 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -224,6 +224,7 @@ def _generate_hash(self, data: Any) -> str: Hashed representation of the provided data """ + data = getattr(data, "raw_event", data) # could be a data class depending on decorator order hashed_data = self.hash_function(json.dumps(data, cls=Encoder).encode()) return hashed_data.hexdigest() diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 0fc33d3a3f7..5b0d0db8c0a 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -21,22 +21,35 @@ Lambda function. ### Utilizing the data classes -The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class. +The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class or +by using the `event_source` decorator. For example, if your Lambda function is being triggered by an API Gateway proxy integration, you can use the `APIGatewayProxyEvent` class. 
=== "app.py" - ```python hl_lines="1 4" - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent +```python hl_lines="1 4" +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent - def lambda_handler(event, context): - event: APIGatewayProxyEvent = APIGatewayProxyEvent(event) +def lambda_handler(event: dict, context): + event = APIGatewayProxyEvent(event) + if 'helloworld' in event.path and event.http_method == 'GET': + do_something_with(event.body, user) +``` - if 'helloworld' in event.path and event.http_method == 'GET': - do_something_with(event.body, user) - ``` +Same example as above, but using the `event_source` decorator + +=== "app.py" + +```python hl_lines="1 3" +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent + +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if 'helloworld' in event.path and event.http_method == 'GET': + do_something_with(event.body, user) +``` **Autocomplete with self-documented properties and methods** @@ -49,7 +62,8 @@ For example, if your Lambda function is being triggered by an API Gateway proxy Event Source | Data_class ------------------------------------------------- | --------------------------------------------------------------------------------- [API Gateway Proxy](#api-gateway-proxy) | `APIGatewayProxyEvent` -[API Gateway Proxy event v2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` +[API Gateway Proxy V2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2` +[Application Load Balancer](#application-load-balancer) | `ALBEvent` [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` @@ -76,34 +90,47 @@ It is used for either API Gateway REST API or HTTP API using v1 proxy event. === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent - def lambda_handler(event, context): - event: APIGatewayProxyEvent = APIGatewayProxyEvent(event) +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if "helloworld" in event.path and event.http_method == "GET": request_context = event.request_context identity = request_context.identity + user = identity.user + do_something_with(event.json_body, user) +``` - if 'helloworld' in event.path and event.http_method == 'GET': - user = identity.user - do_something_with(event.body, user) - ``` +### API Gateway Proxy V2 -### API Gateway Proxy v2 +It is used for HTTP API using v2 proxy event. 
=== "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2 +```python +from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEventV2 - def lambda_handler(event, context): - event: APIGatewayProxyEventV2 = APIGatewayProxyEventV2(event) - request_context = event.request_context - query_string_parameters = event.query_string_parameters +@event_source(data_class=APIGatewayProxyEventV2) +def lambda_handler(event: APIGatewayProxyEventV2, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) +``` - if 'helloworld' in event.raw_path and request_context.http.method == 'POST': - do_something_with(event.body, query_string_parameters) - ``` +### Application Load Balancer + +Is it used for Application load balancer event. + +=== "app.py" + +```python +from aws_lambda_powertools.utilities.data_classes import event_source, ALBEvent + +@event_source(data_class=ALBEvent) +def lambda_handler(event: ALBEvent, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) +``` ### AppSync Resolver @@ -210,18 +237,17 @@ decompress and parse json data from the event. === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent - from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData +```python +from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchLogsEvent +from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData - def lambda_handler(event, context): - event: CloudWatchLogsEvent = CloudWatchLogsEvent(event) - - decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data - log_events = decompressed_log.log_events - for event in log_events: - do_something_with(event.timestamp, event.message) - ``` +@event_source(data_class=CloudWatchLogsEvent) +def lambda_handler(event: CloudWatchLogsEvent, context): + decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data + log_events = decompressed_log.log_events + for event in log_events: + do_something_with(event.timestamp, event.message) +``` ### CodePipeline Job @@ -229,51 +255,50 @@ Data classes and utility functions to help create continuous delivery pipelines === "app.py" - ```python - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent +```python +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes import event_source, CodePipelineJobEvent - logger = Logger() +logger = Logger() +@event_source(data_class=CodePipelineJobEvent) +def lambda_handler(event, context): + """The Lambda function handler - def lambda_handler(event, context): - """The Lambda function handler - - If a continuing job then checks the CloudFormation stack status - and updates the job accordingly. - - If a new job then kick of an update or creation of the target - CloudFormation stack. 
- """ - event: CodePipelineJobEvent = CodePipelineJobEvent(event) - - # Extract the Job ID - job_id = event.get_id - - # Extract the params - params: dict = event.decoded_user_parameters - stack = params["stack"] - artifact_name = params["artifact"] - template_file = params["file"] - - try: - if event.data.continuation_token: - # If we're continuing then the create/update has already been triggered - # we just need to check if it has finished. - check_stack_update_status(job_id, stack) - else: - template = event.get_artifact(artifact_name, template_file) - # Kick off a stack update or create - start_update_or_create(job_id, stack, template) - except Exception as e: - # If any other exceptions which we didn't expect are raised - # then fail the job and log the exception message. - logger.exception("Function failed due to exception.") - put_job_failure(job_id, "Function exception: " + str(e)) - - logger.debug("Function complete.") - return "Complete." - ``` + If a continuing job then checks the CloudFormation stack status + and updates the job accordingly. + + If a new job then kick of an update or creation of the target + CloudFormation stack. + """ + + # Extract the Job ID + job_id = event.get_id + + # Extract the params + params: dict = event.decoded_user_parameters + stack = params["stack"] + artifact_name = params["artifact"] + template_file = params["file"] + + try: + if event.data.continuation_token: + # If we're continuing then the create/update has already been triggered + # we just need to check if it has finished. + check_stack_update_status(job_id, stack) + else: + template = event.get_artifact(artifact_name, template_file) + # Kick off a stack update or create + start_update_or_create(job_id, stack, template) + except Exception as e: + # If any other exceptions which we didn't expect are raised + # then fail the job and log the exception message. + logger.exception("Function failed due to exception.") + put_job_failure(job_id, "Function exception: " + str(e)) + + logger.debug("Function complete.") + return "Complete." 
+``` ### Cognito User Pool @@ -297,15 +322,15 @@ Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChalleng === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent - def lambda_handler(event, context): - event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) +def lambda_handler(event, context): + event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) - user_attributes = event.request.user_attributes - do_something_with(user_attributes) - ``` + user_attributes = event.request.user_attributes + do_something_with(user_attributes) +``` #### Define Auth Challenge Example @@ -470,17 +495,18 @@ This example is based on the AWS Cognito docs for [Create Auth Challenge Lambda === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent - def handler(event: dict, context) -> dict: - event: CreateAuthChallengeTriggerEvent = CreateAuthChallengeTriggerEvent(event) - if event.request.challenge_name == "CUSTOM_CHALLENGE": - event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} - event.response.private_challenge_parameters = {"answer": "5"} - event.response.challenge_metadata = "CAPTCHA_CHALLENGE" - return event.raw_event - ``` +@event_source(data_class=CreateAuthChallengeTriggerEvent) +def handler(event: CreateAuthChallengeTriggerEvent, context) -> dict: + if event.request.challenge_name == "CUSTOM_CHALLENGE": + event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} + event.response.private_challenge_parameters = {"answer": "5"} + event.response.challenge_metadata = "CAPTCHA_CHALLENGE" + return event.raw_event +``` #### Verify Auth Challenge Response Example @@ -488,16 +514,17 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent - def handler(event: dict, context) -> dict: - event: VerifyAuthChallengeResponseTriggerEvent = VerifyAuthChallengeResponseTriggerEvent(event) - event.response.answer_correct = ( - event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer - ) - return event.raw_event - ``` +@event_source(data_class=VerifyAuthChallengeResponseTriggerEvent) +def handler(event: VerifyAuthChallengeResponseTriggerEvent, context) -> dict: + event.response.answer_correct = ( + event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer + ) + return event.raw_event +``` ### Connect Contact Flow @@ -505,21 +532,21 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( - ConnectContactFlowChannel, - ConnectContactFlowEndpointType, - ConnectContactFlowEvent, - 
ConnectContactFlowInitiationMethod, - ) - - def lambda_handler(event, context): - event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) - assert event.contact_data.attributes == {"Language": "en-US"} - assert event.contact_data.channel == ConnectContactFlowChannel.VOICE - assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER - assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API - ``` +```python +from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( + ConnectContactFlowChannel, + ConnectContactFlowEndpointType, + ConnectContactFlowEvent, + ConnectContactFlowInitiationMethod, +) + +def lambda_handler(event, context): + event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) + assert event.contact_data.attributes == {"Language": "en-US"} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API +``` ### DynamoDB Streams @@ -529,34 +556,34 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - DynamoDBStreamEvent, - DynamoDBRecordEventName - ) +```python +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBStreamEvent, + DynamoDBRecordEventName +) - def lambda_handler(event, context): - event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) +def lambda_handler(event, context): + event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) - # Multiple records can be delivered in a single event - for record in event.records: - if record.event_name == DynamoDBRecordEventName.MODIFY: - do_something_with(record.dynamodb.new_image) - do_something_with(record.dynamodb.old_image) - ``` + # Multiple records can be delivered in a single event + for record in event.records: + if record.event_name == DynamoDBRecordEventName.MODIFY: + do_something_with(record.dynamodb.new_image) + do_something_with(record.dynamodb.old_image) +``` ### EventBridge === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent - def lambda_handler(event, context): - event: EventBridgeEvent = EventBridgeEvent(event) - do_something_with(event.detail) +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event: EventBridgeEvent, context): + do_something_with(event.detail) - ``` +``` ### Kinesis streams @@ -565,40 +592,40 @@ or plain text, depending on the original payload. 
=== "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import KinesisStreamEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, KinesisStreamEvent - def lambda_handler(event, context): - event: KinesisStreamEvent = KinesisStreamEvent(event) - kinesis_record = next(event.records).kinesis +@event_source(data_class=KinesisStreamEvent) +def lambda_handler(event: KinesisStreamEvent, context): + kinesis_record = next(event.records).kinesis - # if data was delivered as text - data = kinesis_record.data_as_text() + # if data was delivered as text + data = kinesis_record.data_as_text() - # if data was delivered as json - data = kinesis_record.data_as_json() + # if data was delivered as json + data = kinesis_record.data_as_json() - do_something_with(data) - ``` + do_something_with(data) +``` ### S3 === "app.py" - ```python - from urllib.parse import unquote_plus - from aws_lambda_powertools.utilities.data_classes import S3Event +```python +from urllib.parse import unquote_plus +from aws_lambda_powertools.utilities.data_classes import event_source, S3Event - def lambda_handler(event, context): - event: S3Event = S3Event(event) - bucket_name = event.bucket_name +@event_source(data_class=S3Event) +def lambda_handler(event: S3Event, context): + bucket_name = event.bucket_name - # Multiple records can be delivered in a single event - for record in event.records: - object_key = unquote_plus(record.s3.get_object.key) + # Multiple records can be delivered in a single event + for record in event.records: + object_key = unquote_plus(record.s3.get_object.key) - do_something_with(f'{bucket_name}/{object_key}') - ``` + do_something_with(f"{bucket_name}/{object_key}") +``` ### S3 Object Lambda @@ -606,84 +633,81 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" - ```python hl_lines="5-6 12 14" - import boto3 - import requests +```python hl_lines="5-6 12 14" +import boto3 +import requests - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA - from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA +from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent - logger = Logger() - session = boto3.Session() - s3 = session.client("s3") +logger = Logger() +session = boto3.Session() +s3 = session.client("s3") - @logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) - def lambda_handler(event, context): - event = S3ObjectLambdaEvent(event) +@logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) +def lambda_handler(event, context): + event = S3ObjectLambdaEvent(event) - # Get object from S3 - response = requests.get(event.input_s3_url) - original_object = response.content.decode("utf-8") + # Get object from S3 + response = requests.get(event.input_s3_url) + original_object = response.content.decode("utf-8") - # Make changes to the object about to be returned - transformed_object = original_object.upper() + # Make changes to the object about to be returned + transformed_object = original_object.upper() - # Write object back to S3 Object Lambda - s3.write_get_object_response( - Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token - ) + # Write object back to S3 Object 
Lambda + s3.write_get_object_response( + Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token + ) - return {"status_code": 200} - ``` + return {"status_code": 200} +``` ### SES === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SESEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SESEvent - def lambda_handler(event, context): - event: SESEvent = SESEvent(event) +@event_source(data_class=SESEvent) +def lambda_handler(event: SESEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + mail = record.ses.mail + common_headers = mail.common_headers - # Multiple records can be delivered in a single event - for record in event.records: - mail = record.ses.mail - common_headers = mail.common_headers - - do_something_with(common_headers.to, common_headers.subject) - ``` + do_something_with(common_headers.to, common_headers.subject) +``` ### SNS === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SNSEvent +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SNSEvent - def lambda_handler(event, context): - event: SNSEvent = SNSEvent(event) +@event_source(data_class=SNSEvent) +def lambda_handler(event: SNSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + message = record.sns.message + subject = record.sns.subject - # Multiple records can be delivered in a single event - for record in event.records: - message = record.sns.message - subject = record.sns.subject - - do_something_with(subject, message) - ``` + do_something_with(subject, message) +``` ### SQS === "app.py" - ```python - from aws_lambda_powertools.utilities.data_classes import SQSEvent - - def lambda_handler(event, context): - event: SQSEvent = SQSEvent(event) +```python +from aws_lambda_powertools.utilities.data_classes import event_source, SQSEvent - # Multiple records can be delivered in a single event - for record in event.records: - do_something_with(record.body) - ``` +@event_source(data_class=SQSEvent) +def lambda_handler(event: SQSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + do_something_with(record.body) +``` diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 25f76af48be..0cf19ab9de0 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -1,4 +1,5 @@ import copy +import hashlib import json import sys from hashlib import md5 @@ -7,6 +8,7 @@ import pytest from botocore import stub +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2, event_source from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, @@ -19,6 +21,7 @@ from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord from aws_lambda_powertools.utilities.validation import envelopes, validator +from tests.functional.utils import load_event TABLE_NAME = "TEST_TABLE" @@ -223,7 +226,7 @@ def lambda_handler(event, context): def test_idempotent_lambda_first_execution_cached( idempotency_config: IdempotencyConfig, 
persistence_store: DynamoDBPersistenceLayer, - lambda_apigw_event: DynamoDBPersistenceLayer, + lambda_apigw_event, expected_params_update_item, expected_params_put_item, lambda_response, @@ -845,3 +848,41 @@ def handler(event, context): handler({}, lambda_context) assert "No data found to create a hashed idempotency_key" == e.value.args[0] + + +class MockPersistenceLayer(BasePersistenceLayer): + def __init__(self, expected_idempotency_key: str): + self.expected_idempotency_key = expected_idempotency_key + super(MockPersistenceLayer, self).__init__() + + def _put_record(self, data_record: DataRecord) -> None: + assert data_record.idempotency_key == self.expected_idempotency_key + + def _update_record(self, data_record: DataRecord) -> None: + assert data_record.idempotency_key == self.expected_idempotency_key + + def _get_record(self, idempotency_key) -> DataRecord: + ... + + def _delete_record(self, data_record: DataRecord) -> None: + ... + + +def test_idempotent_lambda_event_source(lambda_context): + # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator + mock_event = load_event("apiGatewayProxyV2Event.json") + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + expected_result = {"message": "Foo"} + + # GIVEN an event_source decorator + # AND then an idempotent decorator + @event_source(data_class=APIGatewayProxyEventV2) + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + assert isinstance(event, APIGatewayProxyEventV2) + return expected_result + + # WHEN calling the lambda handler + result = lambda_handler(mock_event, lambda_context) + # THEN we expect the handler to execute successfully + assert result == expected_result diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 07648f84ee9..60dfc591897 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -62,6 +62,7 @@ DynamoDBStreamEvent, StreamViewType, ) +from aws_lambda_powertools.utilities.data_classes.event_source import event_source from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent from tests.functional.utils import load_event @@ -1237,3 +1238,15 @@ def download_file(bucket: str, key: str, tmp_name: str): } ) assert artifact_str == file_contents + + +def test_reflected_types(): + # GIVEN an event_source decorator + @event_source(data_class=APIGatewayProxyEventV2) + def lambda_handler(event: APIGatewayProxyEventV2, _): + # THEN we except the event to be of the pass in data class type + assert isinstance(event, APIGatewayProxyEventV2) + assert event.get_header_value("x-foo") == "Foo" + + # WHEN calling the lambda handler + lambda_handler({"headers": {"X-Foo": "Foo"}}, None) From 6465d9ee11ead2b348bd5fca05f4f07ea80bc804 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Jun 2021 08:16:50 +0000 Subject: [PATCH 24/35] chore(deps-dev): bump mkdocs-material from 7.1.6 to 7.1.7 (#464) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.1.6 to 7.1.7.
Release notes

Sourced from mkdocs-material's releases.

mkdocs-material-7.1.7

  • Improved screen reader support
Changelog

Sourced from mkdocs-material's changelog.

7.1.7 (June 6, 2021)

  • Improved screen reader support
Commits
  • 3d6cbc9 Updated dependencies
  • 7485a61 Prepare 7.1.7 release
  • 11fcf4f Added aria labels to language and version selector
  • 2a5ba14 Added aria-label to language selector button (#2725)
  • 42500aa Added aria labels to palette toggles
  • 0dd4831 Added aria labels to footer links
  • a201390 Formatting
  • db83978 Updated note on now removed Docker image for Insiders
  • 86f49eb Updated dependencies
  • 85d9a56 Updated Insiders changelog
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mkdocs-material&package-manager=pip&previous-version=7.1.6&new-version=7.1.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
--- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index fe8d75b0905..7d7537a0dc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -591,7 +591,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.1.6" +version = "7.1.7" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -1065,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "a207f2be8a6e01d19062b6293e455b0b318d45ac96cf562c9fcdf8fcc7cdf5ff" +content-hash = "3159635f02dd232e8271d6fd4f6b1b92cefb6f8b8ada60bda6929f3839515862" [metadata.files] appdirs = [ @@ -1342,8 +1342,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.6.tar.gz", hash = "sha256:b3f1aaea3e79e3c3b30babe0238915cf4ad4c4560d404bb0ac3298ee2ce004a3"}, - {file = "mkdocs_material-7.1.6-py2.py3-none-any.whl", hash = "sha256:01566c460990dad54d6ec935553b9c5c8e4e753ac3e30ba0945ceeff4ad164ac"}, + {file = "mkdocs-material-7.1.7.tar.gz", hash = "sha256:34d57af1e3e68ff4251feb82ced70545d8aa6064861ba76b1a15928399d21879"}, + {file = "mkdocs_material-7.1.7-py2.py3-none-any.whl", hash = "sha256:1725d02efed5d989258fd1620673e78a7171f82028f30c2da8d21e7539150221"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, diff --git a/pyproject.toml b/pyproject.toml index 94617837137..13f6d723efc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ radon = "^4.5.0" xenon = "^0.7.3" flake8-eradicate = "^1.0.0" flake8-bugbear = "^21.3.2" -mkdocs-material = "^7.1.6" +mkdocs-material = "^7.1.7" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" From b029b5c3ffdbcb8080f4d9313ff36150412c088e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Jun 2021 08:18:41 +0000 Subject: [PATCH 25/35] chore(deps): bump boto3 from 1.17.87 to 1.17.88 (#463) Bumps [boto3](https://github.com/boto/boto3) from 1.17.87 to 1.17.88.
Changelog

Sourced from boto3's changelog.

1.17.88

  • api-change:rds: [botocore] Documentation updates for RDS: fixing an outdated link to the RDS documentation in DBInstance$DBInstanceStatus
  • api-change:pi: [botocore] The new GetDimensionKeyDetails action retrieves the attributes of the specified dimension group for a DB instance or data source.
  • api-change:cloudtrail: [botocore] AWS CloudTrail supports data events on new service resources, including Amazon DynamoDB tables and S3 Object Lambda access points.
  • api-change:medialive: [botocore] Add support for automatically setting the H.264 adaptive quantization and GOP B-frame fields.
  • api-change:autoscaling: [botocore] Documentation updates for Amazon EC2 Auto Scaling
  • api-change:qldb: [botocore] Documentation updates for Amazon QLDB
Commits
  • 16b19ea Merge branch 'release-1.17.88'
  • d167189 Bumping version to 1.17.88
  • b6dbef8 Add changelog entries from botocore
  • 524a83d Merge branch 'release-1.17.87' into develop
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.87&new-version=1.17.88)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
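The GetDimensionKeyDetails action called out above is the only new operation in this bump. A rough, hypothetical sketch of calling it through boto3 follows (not part of this patch series); the resource identifier and group identifier are placeholders, and the parameter and response field names are assumptions based on the Performance Insights API rather than anything in this repository.

```python
import boto3

# Hypothetical sketch of the new GetDimensionKeyDetails operation noted in
# the 1.17.88 changelog above; identifier and group values are placeholders.
pi = boto3.client("pi")

details = pi.get_dimension_key_details(
    ServiceType="RDS",
    Identifier="db-EXAMPLERESOURCEID",  # placeholder DB resource identifier
    Group="db.sql",                     # dimension group to inspect
    GroupIdentifier="example-sql-id",   # e.g. taken from DescribeDimensionKeys
    RequestedDimensions=["statement"],  # ask for the full statement text
)
for dimension in details.get("Dimensions", []):
    print(dimension.get("Dimension"), dimension.get("Status"))
```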
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7d7537a0dc7..cb9bed06939 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.87" +version = "1.17.88" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.87,<1.21.0" +botocore = ">=1.20.88,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.87" +version = "1.20.88" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1092,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.87-py2.py3-none-any.whl", hash = "sha256:52025e0af7935cb7036037978de190e41ad7f6716d1de1e3669976e99d084edf"}, - {file = "boto3-1.17.87.tar.gz", hash = "sha256:612aa5dc27b87ae1dc695e194f97af7da0fcc9e97aa80d9740732d78ba117119"}, + {file = "boto3-1.17.88-py2.py3-none-any.whl", hash = "sha256:13afcc5e2fcc5e4f9eab1ee46a769cf738a259dcd45f71ee79255f18973e4584"}, + {file = "boto3-1.17.88.tar.gz", hash = "sha256:a715ca6c4457d56ea3e3efde9bdc8be41c29b2f2a904fbd12befdb9cb5e289e4"}, ] botocore = [ - {file = "botocore-1.20.87-py2.py3-none-any.whl", hash = "sha256:3dcc84855349073e0cb706e90a9e1180899deded4b8555698fb4a5a5b3357202"}, - {file = "botocore-1.20.87.tar.gz", hash = "sha256:04a5594ae1886233cb15ab636b51aeecf6b5412231f72744405f11a54a8cda58"}, + {file = "botocore-1.20.88-py2.py3-none-any.whl", hash = "sha256:be3cb73fab60a2349e2932bd0cbbe7e7736e3a2cd8c05b539d362ff3e406be76"}, + {file = "botocore-1.20.88.tar.gz", hash = "sha256:bc989edab52d4788aadd8d1aff925f5c6a7cbc68900bfdb8e379965aeac17317"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From a65e55ce7d5468be61fbe043c6e6296387d2e862 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Mon, 7 Jun 2021 21:11:11 -0700 Subject: [PATCH 26/35] feat(data-classes): add AttributeValueType to DynamoDBStreamEvent (#462) --- .../data_classes/dynamo_db_stream_event.py | 77 +++++++++++++++- tests/functional/test_data_classes.py | 90 +++++++++++++++++++ 2 files changed, 165 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py index bc3a4a82995..1ec3d6157bf 100644 --- a/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py +++ b/aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py @@ -1,15 +1,42 @@ from enum import Enum -from typing import Dict, Iterator, List, Optional +from typing import Any, Dict, Iterator, List, Optional, Union from aws_lambda_powertools.utilities.data_classes.common import DictWrapper +class AttributeValueType(Enum): + Binary = "B" + BinarySet = "BS" + Boolean = "BOOL" + List = "L" + Map = "M" + Number = "N" + NumberSet = "NS" + Null = "NULL" + String = "S" + StringSet = "SS" + + class AttributeValue(DictWrapper): """Represents the data for an attribute - Documentation: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html + Documentation: + -------------- + - 
https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html + - https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html """ + def __init__(self, data: Dict[str, Any]): + """AttributeValue constructor + + Parameters + ---------- + data: Dict[str, Any] + Raw lambda event dict + """ + super().__init__(data) + self.dynamodb_type = list(data.keys())[0] + @property def b_value(self) -> Optional[str]: """An attribute of type Base64-encoded binary data object @@ -106,6 +133,29 @@ def ss_value(self) -> Optional[List[str]]: """ return self.get("SS") + @property + def get_type(self) -> AttributeValueType: + """Get the attribute value type based on the contained data""" + return AttributeValueType(self.dynamodb_type) + + @property + def l_value(self) -> Optional[List["AttributeValue"]]: + """Alias of list_value""" + return self.list_value + + @property + def m_value(self) -> Optional[Dict[str, "AttributeValue"]]: + """Alias of map_value""" + return self.map_value + + @property + def get_value(self) -> Union[Optional[bool], Optional[str], Optional[List], Optional[Dict]]: + """Get the attribute value""" + try: + return getattr(self, f"{self.dynamodb_type.lower()}_value") + except AttributeError: + raise TypeError(f"Dynamodb type {self.dynamodb_type} is not supported") + def _attribute_value_dict(attr_values: Dict[str, dict], key: str) -> Optional[Dict[str, AttributeValue]]: """A dict of type String to AttributeValue object map @@ -224,6 +274,29 @@ class DynamoDBStreamEvent(DictWrapper): Documentation: ------------- - https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html + + Example + ------- + **Process dynamodb stream events and use get_type and get_value for handling conversions** + + from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + AttributeValueType, + AttributeValue, + ) + from aws_lambda_powertools.utilities.typing import LambdaContext + + + @event_source(data_class=DynamoDBStreamEvent) + def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): + for record in event.records: + key: AttributeValue = record.dynamodb.keys["id"] + if key == AttributeValueType.Number: + assert key.get_value == key.n_value + print(key.get_value) + elif key == AttributeValueType.Map: + assert key.get_value == key.map_value + print(key.get_value) """ @property diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 60dfc591897..8b412860694 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -58,6 +58,7 @@ ) from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( AttributeValue, + AttributeValueType, DynamoDBRecordEventName, DynamoDBStreamEvent, StreamViewType, @@ -443,6 +444,33 @@ def test_dynamo_db_stream_trigger_event(): assert record.user_identity is None +def test_dynamo_attribute_value_b_value(): + example_attribute_value = {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Binary + assert attribute_value.b_value == attribute_value.get_value + + +def test_dynamo_attribute_value_bs_value(): + example_attribute_value = {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == 
AttributeValueType.BinarySet + assert attribute_value.bs_value == attribute_value.get_value + + +def test_dynamo_attribute_value_bool_value(): + example_attribute_value = {"BOOL": True} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Boolean + assert attribute_value.bool_value == attribute_value.get_value + + def test_dynamo_attribute_value_list_value(): example_attribute_value = {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]} attribute_value = AttributeValue(example_attribute_value) @@ -450,6 +478,9 @@ def test_dynamo_attribute_value_list_value(): assert list_value is not None item = list_value[0] assert item.s_value == "Cookies" + assert attribute_value.get_type == AttributeValueType.List + assert attribute_value.l_value == attribute_value.list_value + assert attribute_value.list_value == attribute_value.get_value def test_dynamo_attribute_value_map_value(): @@ -461,6 +492,65 @@ def test_dynamo_attribute_value_map_value(): assert map_value is not None item = map_value["Name"] assert item.s_value == "Joe" + assert attribute_value.get_type == AttributeValueType.Map + assert attribute_value.m_value == attribute_value.map_value + assert attribute_value.map_value == attribute_value.get_value + + +def test_dynamo_attribute_value_n_value(): + example_attribute_value = {"N": "123.45"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Number + assert attribute_value.n_value == attribute_value.get_value + + +def test_dynamo_attribute_value_ns_value(): + example_attribute_value = {"NS": ["42.2", "-19", "7.5", "3.14"]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.NumberSet + assert attribute_value.ns_value == attribute_value.get_value + + +def test_dynamo_attribute_value_null_value(): + example_attribute_value = {"NULL": True} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.Null + assert attribute_value.null_value == attribute_value.get_value + + +def test_dynamo_attribute_value_s_value(): + example_attribute_value = {"S": "Hello"} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.String + assert attribute_value.s_value == attribute_value.get_value + + +def test_dynamo_attribute_value_ss_value(): + example_attribute_value = {"SS": ["Giraffe", "Hippo", "Zebra"]} + + attribute_value = AttributeValue(example_attribute_value) + + assert attribute_value.get_type == AttributeValueType.StringSet + assert attribute_value.ss_value == attribute_value.get_value + + +def test_dynamo_attribute_value_type_error(): + example_attribute_value = {"UNSUPPORTED": "'value' should raise a type error"} + + attribute_value = AttributeValue(example_attribute_value) + + with pytest.raises(TypeError): + print(attribute_value.get_value) + with pytest.raises(ValueError): + print(attribute_value.get_type) def test_event_bridge_event(): From 070428aac582148d99e188f0bdf87ef2e76c45d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Jun 2021 07:08:56 +0000 Subject: [PATCH 27/35] chore(deps): bump boto3 from 1.17.88 to 1.17.89 (#466) Bumps [boto3](https://github.com/boto/boto3) from 1.17.88 to 1.17.89.
Changelog (sourced from boto3's changelog):

1.17.89

  • api-change:sagemaker: [botocore] AWS SageMaker - Releasing new APIs related to Callback steps in model building pipelines. Adds experiment integration to model building pipelines.
  • api-change:glue: [botocore] Add SampleSize variable to S3Target to enable s3-sampling feature through API.
  • api-change:personalize: [botocore] Update regex validation in kmsKeyArn and s3 path API parameters for AWS Personalize APIs
  • api-change:eks: [botocore] Added updateConfig option that allows customers to control upgrade velocity in Managed Node Group.

Commits

  • 1325ffa Merge branch 'release-1.17.89'
  • 6c8075b Bumping version to 1.17.89
  • ab25d49 Add changelog entries from botocore
  • e0d6eff Merge branch 'release-1.17.88' into develop
  • See full diff in compare view
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.17.88&new-version=1.17.89)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb9bed06939..ca9ac489f80 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.88" +version = "1.17.89" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.88,<1.21.0" +botocore = ">=1.20.89,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.88" +version = "1.20.89" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1092,12 +1092,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.88-py2.py3-none-any.whl", hash = "sha256:13afcc5e2fcc5e4f9eab1ee46a769cf738a259dcd45f71ee79255f18973e4584"}, - {file = "boto3-1.17.88.tar.gz", hash = "sha256:a715ca6c4457d56ea3e3efde9bdc8be41c29b2f2a904fbd12befdb9cb5e289e4"}, + {file = "boto3-1.17.89-py2.py3-none-any.whl", hash = "sha256:1f02cd513b130f9cd86c99836de6a0a5f78ea55110bdbc9011d9d78ff0fd3204"}, + {file = "boto3-1.17.89.tar.gz", hash = "sha256:06d8dca85a0bb66b7bf2721745895d44691c78dbe7eb3b146702aff85e34af34"}, ] botocore = [ - {file = "botocore-1.20.88-py2.py3-none-any.whl", hash = "sha256:be3cb73fab60a2349e2932bd0cbbe7e7736e3a2cd8c05b539d362ff3e406be76"}, - {file = "botocore-1.20.88.tar.gz", hash = "sha256:bc989edab52d4788aadd8d1aff925f5c6a7cbc68900bfdb8e379965aeac17317"}, + {file = "botocore-1.20.89-py2.py3-none-any.whl", hash = "sha256:e112f9a45db1c5a42f787e4b228a35da6e823bcba70f43f43005b4fb58066446"}, + {file = "botocore-1.20.89.tar.gz", hash = "sha256:ce0fa8bc260ad187824052805d224cee239d953bb4bfb1e52cf35ad79481b316"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From efc3b16a083a18dc034fdcccf07d43d9aaba518b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 8 Jun 2021 09:52:24 +0200 Subject: [PATCH 28/35] chore: fix path for PR auto-labelling --- .github/boring-cyborg.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index f79494d01aa..85dc7e0579d 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -3,21 +3,21 @@ labelPRBasedOnFilePath: area/utilities: - aws_lambda_powertools/utilities/* - aws_lambda_powertools/utilities/**/* - - aws_lambda_powertools_python/middleware_factory/* - - aws_lambda_powertools_python/middleware_factory/**/* + - aws_lambda_powertools/middleware_factory/* + - aws_lambda_powertools/middleware_factory/**/* area/logger: - - aws_lambda_powertools_python/logging/* - - aws_lambda_powertools_python/logging/**/* - - aws_lambda_powertools_python/package_logger.py + - aws_lambda_powertools/logging/* + - aws_lambda_powertools/logging/**/* + - aws_lambda_powertools/package_logger.py area/tracer: - - aws_lambda_powertools_python/tracing/* - - aws_lambda_powertools_python/tracing/**/* + - aws_lambda_powertools/tracing/* + - aws_lambda_powertools/tracing/**/* area/metrics: - - aws_lambda_powertools_python/metrics/* - - aws_lambda_powertools_python/metrics/**/* + - aws_lambda_powertools/metrics/* + - aws_lambda_powertools/metrics/**/* 
area/event_handlers: - - aws_lambda_powertools_python/event_handler/* - - aws_lambda_powertools_python/event_handler/**/* + - aws_lambda_powertools/event_handler/* + - aws_lambda_powertools/event_handler/**/* documentation: - docs/* From 021e7af4939bd4dc2d02473ce50ed735a67e31c0 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 8 Jun 2021 10:08:23 +0200 Subject: [PATCH 29/35] chore: reintroduce codecov token --- .github/workflows/python_build.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 6919856e18a..acf4edb10d8 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -1,4 +1,4 @@ -name: Build +name: Code quality on: pull_request: @@ -44,3 +44,4 @@ jobs: env_vars: OS,PYTHON name: aws-lambda-powertools-python-codecov fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} From 086be96e9fc9a3d383ace89365f6e5271f549f72 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 8 Jun 2021 10:17:07 +0200 Subject: [PATCH 30/35] chore: ignore codecov upload Codecov is now failing more regularly when uploading code coverage. This sets to ignore until we investigate an alternative --- .github/workflows/python_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index acf4edb10d8..0990d6d0152 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -43,5 +43,5 @@ jobs: # flags: unittests env_vars: OS,PYTHON name: aws-lambda-powertools-python-codecov - fail_ci_if_error: true + # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing token: ${{ secrets.CODECOV_TOKEN }} From a717ca79ae495c974ca6ab1d4f9d912ab6f63d3a Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 8 Jun 2021 10:53:03 +0200 Subject: [PATCH 31/35] feat(logger): add option to clear state per invocation (#467) --- aws_lambda_powertools/logging/logger.py | 19 +++++-- docs/core/logger.md | 72 ++++++++++++++++++++++++- tests/functional/test_logger.py | 20 +++++++ 3 files changed, 106 insertions(+), 5 deletions(-) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 3231f30eccd..689409d9813 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -260,12 +260,18 @@ def _configure_sampling(self): ) def inject_lambda_context( - self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = None, correlation_id_path: str = None + self, + lambda_handler: Callable[[Dict, Any], Any] = None, + log_event: bool = None, + correlation_id_path: str = None, + clear_state: bool = False, ): """Decorator to capture Lambda contextual info and inject into logger Parameters ---------- + clear_state : bool, optional + Instructs logger to remove any custom keys previously added lambda_handler : Callable Method to inject the lambda context log_event : bool, optional @@ -311,7 +317,10 @@ def handler(event, context): if lambda_handler is None: logger.debug("Decorator called with parameters") return functools.partial( - self.inject_lambda_context, log_event=log_event, correlation_id_path=correlation_id_path + self.inject_lambda_context, + log_event=log_event, + correlation_id_path=correlation_id_path, + clear_state=clear_state, ) log_event = resolve_truthy_env_var_choice( @@ -322,7 +331,11 @@ def handler(event, context): def decorate(event, 
context): lambda_context = build_lambda_context_model(context) cold_start = _is_cold_start() - self.append_keys(cold_start=cold_start, **lambda_context.__dict__) + + if clear_state: + self.structure_logs(cold_start=cold_start, **lambda_context.__dict__) + else: + self.append_keys(cold_start=cold_start, **lambda_context.__dict__) if correlation_id_path: self.set_correlation_id(jmespath.search(correlation_id_path, event)) diff --git a/docs/core/logger.md b/docs/core/logger.md index a544bf91e4b..45119ca51d6 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -231,8 +231,9 @@ We provide [built-in JMESPath expressions](#built-in-correlation-id-expressions) ### Appending additional keys -!!! info "Keys might be persisted across invocations" - Always set additional keys as part of your handler to ensure they have the latest value. Additional keys are kept in memory as part of a Logger instance and might be reused in non-cold start scenarios. +!!! info "Custom keys are persisted across warm invocations" + Always set additional keys as part of your handler to ensure they have the latest value, or explicitly clear them with [`clear_state=True`](#clearing-all-state). + You can append additional keys using either mechanism: @@ -426,6 +427,73 @@ You can remove any additional key from Logger state using `remove_keys`. } ``` +#### Clearing all state + +Logger is commonly initialized in the global scope. Due to [Lambda Execution Context reuse](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), this means that custom keys can be persisted across invocations. If you want all custom keys to be deleted, you can use `clear_state=True` param in `inject_lambda_context` decorator. + +!!! info + This is useful when you add multiple custom keys conditionally, instead of setting a default `None` value if not present. Any key with `None` value is automatically removed by Logger. + +!!! danger "This can have unintended side effects if you use Layers" + Lambda Layers code is imported before the Lambda handler. + + This means that `clear_state=True` will instruct Logger to remove any keys previously added before Lambda handler execution proceeds. + + You can either avoid running any code as part of Lambda Layers global scope, or override keys with their latest value as part of handler's execution. 
+ +=== "collect.py" + + ```python hl_lines="5 8" + from aws_lambda_powertools import Logger + + logger = Logger(service="payment") + + @logger.inject_lambda_context(clear_state=True) + def handler(event, context): + if event.get("special_key"): + # Should only be available in the first request log + # as the second request doesn't contain `special_key` + logger.append_keys(debugging_key="value") + + logger.info("Collecting payment") + ``` + +=== "#1 request" + + ```json hl_lines="7" + { + "level": "INFO", + "location": "collect.handler:10", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "special_key": "debug_key", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" + } + ``` + +=== "#2 request" + + ```json hl_lines="7" + { + "level": "INFO", + "location": "collect.handler:10", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": false, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" + } + ``` + + ### Logging exceptions Use `logger.exception` method to log contextual information about exceptions. Logger will include `exception_name` and `exception` keys to aid troubleshooting and error enumeration. diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index ba6e82b72af..44249af6250 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -562,3 +562,23 @@ def handler(event, context): # THEN we should output to a file not stdout log = log_file.read_text() assert "custom handler" in log + + +def test_clear_state_on_inject_lambda_context(lambda_context, stdout, service_name): + # GIVEN + logger = Logger(service=service_name, stream=stdout) + + # WHEN clear_state is set and a key was conditionally added in the first invocation + @logger.inject_lambda_context(clear_state=True) + def handler(event, context): + if event.get("add_key"): + logger.append_keys(my_key="value") + logger.info("Foo") + + # THEN custom key should only exist in the first log + handler({"add_key": True}, lambda_context) + handler({}, lambda_context) + + first_log, second_log = capture_multiple_logging_statements_output(stdout) + assert "my_key" in first_log + assert "my_key" not in second_log From 0505534733f09b98875a5e60fd1217435559263e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 8 Jun 2021 10:56:51 +0200 Subject: [PATCH 32/35] chore: include dependencies label under maintenance --- .github/release-drafter.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml index e2e0a63d205..44ad5a61779 100644 --- a/.github/release-drafter.yml +++ b/.github/release-drafter.yml @@ -20,9 +20,10 @@ categories: - title: '🚒 Deprecations' labels: - 'deprecated' - - title: '🔧 Internal' + - title: '🔧 Maintenance' labels: - 'internal' + - 'dependencies' exclude-labels: - 'skip-changelog' tag-template: 'v$NEXT_PATCH_VERSION' @@ -32,7 +33,7 @@ template: | **[Human readable summary of changes]** ## Changes - + $CHANGES ## This release was made possible by the following contributors: From 7c8966749a97a4645cc336c9b2f4ef9932a06a65 Mon Sep 17 
00:00:00 2001 From: heitorlessa Date: Tue, 8 Jun 2021 11:17:27 +0200 Subject: [PATCH 33/35] chore: fix changelog file redirection Signed-off-by: heitorlessa --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 018f9c9420d..da43c1de67a 100644 --- a/Makefile +++ b/Makefile @@ -81,5 +81,5 @@ release: pr $(MAKE) release-prod changelog: - @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" - docker run -v ${PWD}:/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. -o TMP_CHANGELOG.md + @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" + docker run -v "${PWD}":/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. > TMP_CHANGELOG.md From b8bc3138d2810a835ef9c89367df791623e728e0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 8 Jun 2021 11:33:35 +0200 Subject: [PATCH 34/35] chore: bump version to 1.17.0 Signed-off-by: heitorlessa --- CHANGELOG.md | 31 +++++++++++++++++++++++++++++++ pyproject.toml | 2 +- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5def3a04989..b2e950fa968 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,37 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo ## [Unreleased] +## [1.17.0] - 2021-06-08 + +### Added + +* **Documentation**: Include new public roadmap ([#452](https://github.com/awslabs/aws-lambda-powertools-python/issues/452)) +* **Documentation**: Remove old todo in idempotency docs +* **Data classes:** New `AttributeValueType` to get type and value from data in `DynamoDBStreamEvent` ([#462](https://github.com/awslabs/aws-lambda-powertools-python/issues/462)) +* **Data classes:** New decorator `event_source` to instantiate data_classes ([#442](https://github.com/awslabs/aws-lambda-powertools-python/issues/442)) +* **Logger:** New `clear_state` parameter to clear previously added custom keys upon invocation ([#467](https://github.com/awslabs/aws-lambda-powertools-python/issues/467)) +* **Parser:** Support for API Gateway HTTP API [#434](https://github.com/awslabs/aws-lambda-powertools-python/issues/434) ([#441](https://github.com/awslabs/aws-lambda-powertools-python/issues/441)) + +### Maintenance + +* **deps**: bump xenon from 0.7.1 to 0.7.3 ([#446](https://github.com/awslabs/aws-lambda-powertools-python/issues/446)) +* assited changelog pre-generation, auto-label PR ([#443](https://github.com/awslabs/aws-lambda-powertools-python/issues/443)) +* enable dependabot for dep upgrades ([#444](https://github.com/awslabs/aws-lambda-powertools-python/issues/444)) +* enable mergify ([#450](https://github.com/awslabs/aws-lambda-powertools-python/issues/450)) +* **deps**: bump mkdocs-material from 7.1.5 to 7.1.6 ([#451](https://github.com/awslabs/aws-lambda-powertools-python/issues/451)) +* **deps**: bump boto3 from 1.17.78 to 1.17.84 ([#449](https://github.com/awslabs/aws-lambda-powertools-python/issues/449)) +* update mergify to require approval on dependabot ([#456](https://github.com/awslabs/aws-lambda-powertools-python/issues/456)) +* **deps**: bump actions/setup-python from 1 to 2.2.2 ([#445](https://github.com/awslabs/aws-lambda-powertools-python/issues/445)) +* **deps:** bump boto3 from 1.17.87 to 1.17.88 ([#463](https://github.com/awslabs/aws-lambda-powertools-python/issues/463)) +* **deps:** bump boto3 from 1.17.88 to 1.17.89 ([#466](https://github.com/awslabs/aws-lambda-powertools-python/issues/466)) +* **deps:** bump 
boto3 from 1.17.84 to 1.17.85 ([#455](https://github.com/awslabs/aws-lambda-powertools-python/issues/455)) +* **deps:** bump boto3 from 1.17.85 to 1.17.86 ([#458](https://github.com/awslabs/aws-lambda-powertools-python/issues/458)) +* **deps:** bump boto3 from 1.17.86 to 1.17.87 ([#459](https://github.com/awslabs/aws-lambda-powertools-python/issues/459)) +* **deps-dev:** bump mkdocs-material from 7.1.6 to 7.1.7 ([#464](https://github.com/awslabs/aws-lambda-powertools-python/issues/464)) +* **deps-dev:** bump pytest-cov from 2.12.0 to 2.12.1 ([#454](https://github.com/awslabs/aws-lambda-powertools-python/issues/454)) +* **mergify:** disable check for matrix jobs +* **mergify:** use job name to match GH Actions + ## [1.16.1] - 2021-05-23 ### Fixed diff --git a/pyproject.toml b/pyproject.toml index 13f6d723efc..b0ef085c31d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.16.1" +version = "1.17.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed"] From 8724294013e3157de9478f90e7c1283a952de54f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 8 Jun 2021 20:21:55 +0200 Subject: [PATCH 35/35] docs(data_classes): fix missing dynamodb stream get_type/value --- docs/utilities/data_classes.md | 51 ++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 5b0d0db8c0a..3217c5364d3 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -556,21 +556,42 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St === "app.py" -```python -from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - DynamoDBStreamEvent, - DynamoDBRecordEventName -) - -def lambda_handler(event, context): - event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) - - # Multiple records can be delivered in a single event - for record in event.records: - if record.event_name == DynamoDBRecordEventName.MODIFY: - do_something_with(record.dynamodb.new_image) - do_something_with(record.dynamodb.old_image) -``` + ```python + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBStreamEvent, + DynamoDBRecordEventName + ) + + def lambda_handler(event, context): + event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) + + # Multiple records can be delivered in a single event + for record in event.records: + if record.event_name == DynamoDBRecordEventName.MODIFY: + do_something_with(record.dynamodb.new_image) + do_something_with(record.dynamodb.old_image) + ``` + +=== "multiple_records_types.py" + + ```python + from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent + from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import AttributeValueType, AttributeValue + from aws_lambda_powertools.utilities.typing import LambdaContext + + + @event_source(data_class=DynamoDBStreamEvent) + def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): + for record in event.records: + key: AttributeValue = record.dynamodb.keys["id"] + if key == AttributeValueType.Number: + # {"N": "123.45"} => "123.45" + assert key.get_value == key.n_value + print(key.get_value) + elif key == AttributeValueType.Map: + assert key.get_value == key.map_value + 
print(key.get_value) + ``` ### EventBridge
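
To close the loop on the `AttributeValueType`/`get_value` accessors introduced in [PATCH 26/35] and documented in [PATCH 35/35] above, here is a minimal, self-contained sketch of how they resolve raw DynamoDB type descriptors outside of a full stream event. The sample attribute dicts and the printed messages are illustrative assumptions, not part of the patches themselves:

```python
from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
    AttributeValue,
    AttributeValueType,
)

# Hypothetical raw attribute values, shaped like the "Keys"/"NewImage" entries
# of a DynamoDB stream record (single-key dicts: type descriptor -> value)
sample_attributes = {
    "id": {"N": "123.45"},
    "metadata": {"M": {"Name": {"S": "Joe"}}},
}

for name, raw in sample_attributes.items():
    value = AttributeValue(raw)
    # get_type maps the descriptor key ("N", "M", ...) to the AttributeValueType enum
    if value.get_type == AttributeValueType.Number:
        # get_value dispatches to n_value; DynamoDB numbers arrive as strings
        print(f"{name} is a Number: {value.get_value}")
    elif value.get_type == AttributeValueType.Map:
        # get_value dispatches to map_value: a dict of nested AttributeValue objects
        print(f"{name} is a Map with keys: {list(value.get_value.keys())}")
```

Note that `get_value` returns data in the same shape as the property it dispatches to, so numbers come back as strings and maps come back as nested `AttributeValue` objects rather than plain Python types.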