diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml index 8d7871c899f..2e4afb3dac8 100644 --- a/.github/workflows/label_pr_on_title.yml +++ b/.github/workflows/label_pr_on_title.yml @@ -53,12 +53,12 @@ jobs: const pr_number = Number(fs.readFileSync('./number')); const pr_title = fs.readFileSync('./title', 'utf-8').trim(); - const FEAT_REGEX = /feat(\((\w+)\))?(\:.+)/ - const BUG_REGEX = /(fix|bug)(\((\w+)\))?(\:.+)/ - const DOCS_REGEX = /(docs|doc)(\((\w+)\))?(\:.+)/ - const CHORE_REGEX = /(chore)(\((\w+)\))?(\:.+)/ - const DEPRECATED_REGEX = /(deprecated)(\((\w+)\))?(\:.+)/ - const REFACTOR_REGEX = /(refactor)(\((\w+)\))?(\:.+)/ + const FEAT_REGEX = /feat(\((.+)\))?(\:.+)/ + const BUG_REGEX = /(fix|bug)(\((.+)\))?(\:.+)/ + const DOCS_REGEX = /(docs|doc)(\((.+)\))?(\:.+)/ + const CHORE_REGEX = /(chore)(\((.+)\))?(\:.+)/ + const DEPRECATED_REGEX = /(deprecated)(\((.+)\))?(\:.+)/ + const REFACTOR_REGEX = /(refactor)(\((.+)\))?(\:.+)/ const labels = { "feature": FEAT_REGEX, diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 17aa08ead81..7af7bb8e4ba 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -30,6 +30,8 @@ jobs: run: make dev - name: Formatting and Linting run: make lint + - name: Static type checking + run: make mypy - name: Test with pytest run: make test - name: Security baseline diff --git a/.gitignore b/.gitignore index ce5e32bd3e1..c5d27c9789a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ - # Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode # Edit at https://www.gitignore.io/?templates=osx,linux,python,windows,pycharm,visualstudiocode @@ -304,3 +303,4 @@ node_modules api/ site/ !404.html +!docs/overrides/*.html diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ff18157792..894851b3fb6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,34 @@ All notable changes to this project will be documented in this file. This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) format for changes and adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 1.25.0 - 2022-02-09 + +### Bug Fixes + +* **apigateway:** remove indentation in debug_mode ([#987](https://github.com/awslabs/aws-lambda-powertools-python/issues/987)) +* **ci:** pr label regex for special chars in title +* **logger:** exclude source_logger in copy_config_to_registered_loggers ([#1001](https://github.com/awslabs/aws-lambda-powertools-python/issues/1001)) +* **logger:** test generates logfile + +### Documentation + +* **event-handler:** improve testing section for graphql ([#996](https://github.com/awslabs/aws-lambda-powertools-python/issues/996)) +* **layer:** update to 1.24.2 +* **theme:** upgrade mkdocs-material to 8.x ([#1002](https://github.com/awslabs/aws-lambda-powertools-python/issues/1002)) +* **tutorial:** fix broken internal links ([#1000](https://github.com/awslabs/aws-lambda-powertools-python/issues/1000)) +* **general:** fix syntax errors and line highlights ([#1004](https://github.com/awslabs/aws-lambda-powertools-python/pull/1004)) + +### Features + +* **event-handler:** new resolvers to fix current_event typing ([#978](https://github.com/awslabs/aws-lambda-powertools-python/issues/978)) +* **logger:** log_event support event data classes (e.g. 
S3Event) ([#984](https://github.com/awslabs/aws-lambda-powertools-python/issues/984)) +* **mypy:** complete mypy support for the entire codebase ([#943](https://github.com/awslabs/aws-lambda-powertools-python/issues/943)) + +### Maintenance + +* **deps-dev:** bump flake8-bugbear from 21.11.29 to 22.1.11 ([#955](https://github.com/awslabs/aws-lambda-powertools-python/issues/955)) +* **metrics:** fix tests when warnings are disabled ([#994](https://github.com/awslabs/aws-lambda-powertools-python/issues/994)) + ## 1.24.2 - 2022-01-21 ### Bug Fixes diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0941fbc535b..060726ec11a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -49,13 +49,14 @@ You might find useful to run both the documentation website and the API referenc Category | Convention ------------------------------------------------- | --------------------------------------------------------------------------------- -**Docstring** | We use a slight variation of numpy convention with markdown to help generate more readable API references. -**Style guide** | We use black as well as flake8 extensions to enforce beyond good practices [PEP8](https://pep8.org/). We strive to make use of type annotation as much as possible, but don't overdo in creating custom types. +**Docstring** | We use a slight variation of the NumPy convention with markdown to help generate more readable API references. +**Style guide** | We use black as well as flake8 extensions to enforce good practices beyond [PEP8](https://pep8.org/). We use type annotations and enforce static type checking in CI (mypy). **Core utilities** | Core utilities use a Class, always accept `service` as a constructor parameter, can work in isolation, and are also available in other languages implementation. **Utilities** | Utilities are not as strict as core and focus on solving a developer experience problem while following the project [Tenets](https://awslabs.github.io/aws-lambda-powertools-python/#tenets). **Exceptions** | Specific exceptions live within utilities themselves and use `Error` suffix e.g. `MetricUnitError`. -**Git commits** | We follow [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/). These are not enforced as we squash and merge PRs, but PR titles are enforced during CI. -**Documentation** | API reference docs are generated from docstrings which should have Examples section to allow developers to have what they need within their own IDE. Documentation website covers the wider usage, tips, and strive to be concise. +**Git commits** | We follow [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/). We do not enforce conventional commits on contributors to lower the entry bar. Instead, we enforce a conventional PR title so our label automation and changelog are generated correctly. +**API documentation** | API reference docs are generated from docstrings, which should have an Examples section so developers have what they need within their own IDE. The documentation website covers wider usage and tips, and strives to be concise. +**Documentation** | We treat it like a product. We sub-divide content aimed at getting started (80% of customers) vs advanced usage (20%). We also ensure customers know how to unit test their code when using our features. 
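As an aside for contributors: the conventional PR title convention above is what `label_pr_on_title.yml` (changed earlier in this diff) matches against. Here is a rough Python sketch of why this PR relaxes the scope group from `\w+` to `.+`; the workflow actually runs the equivalent JavaScript regex, and the title below is a made-up example:

```python
import re

# Old pattern: the scope group only accepted word characters
OLD_FEAT = re.compile(r"feat(\((\w+)\))?(\:.+)")
# New pattern, mirroring the change in label_pr_on_title.yml: scope accepts any characters
NEW_FEAT = re.compile(r"feat(\((.+)\))?(\:.+)")

title = "feat(event-handler): add new resolvers"  # hypothetical title; "-" is not a \w character

assert OLD_FEAT.match(title) is None      # old regex misses it, so no "feature" label was applied
assert NEW_FEAT.match(title) is not None  # new regex matches, so label automation works
```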
## Finding contributions to work on diff --git a/Makefile b/Makefile index 5b8e9b0d689..fc350ac6923 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ coverage-html: pre-commit: pre-commit run --show-diff-on-failure -pr: lint pre-commit test security-baseline complexity-baseline +pr: lint mypy pre-commit test security-baseline complexity-baseline build: pr poetry build diff --git a/aws_lambda_powertools/event_handler/__init__.py b/aws_lambda_powertools/event_handler/__init__.py index def92f706f9..0b0c08b20a1 100644 --- a/aws_lambda_powertools/event_handler/__init__.py +++ b/aws_lambda_powertools/event_handler/__init__.py @@ -2,7 +2,7 @@ Event handler decorators for common Lambda events """ -from .api_gateway import ApiGatewayResolver +from .api_gateway import ALBResolver, APIGatewayHttpResolver, ApiGatewayResolver, APIGatewayRestResolver from .appsync import AppSyncResolver -__all__ = ["AppSyncResolver", "ApiGatewayResolver"] +__all__ = ["AppSyncResolver", "APIGatewayRestResolver", "APIGatewayHttpResolver", "ALBResolver", "ApiGatewayResolver"] diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 5017597c0f1..8e4ea866dd1 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -10,7 +10,7 @@ from enum import Enum from functools import partial from http import HTTPStatus -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union +from typing import Any, Callable, Dict, List, Match, Optional, Pattern, Set, Tuple, Type, Union from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError @@ -47,9 +47,9 @@ class CORSConfig: Simple cors example using the default permissive cors, not this should only be used during early prototyping ```python - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/my/path", cors=True) def with_cors(): @@ -61,7 +61,7 @@ def with_cors(): ```python from aws_lambda_powertools.event_handler.api_gateway import ( - ApiGatewayResolver, CORSConfig + APIGatewayRestResolver, CORSConfig ) cors_config = CORSConfig( @@ -71,7 +71,7 @@ def with_cors(): max_age=100, allow_credentials=True, ) - app = ApiGatewayResolver(cors=cors_config) + app = APIGatewayRestResolver(cors=cors_config) @app.get("/my/path") def with_cors(): @@ -167,7 +167,7 @@ class Route: """Internally used Route Configuration""" def __init__( - self, method: str, rule: Any, func: Callable, cors: bool, compress: bool, cache_control: Optional[str] + self, method: str, rule: Pattern, func: Callable, cors: bool, compress: bool, cache_control: Optional[str] ): self.method = method.upper() self.rule = rule @@ -252,10 +252,10 @@ def get(self, rule: str, cors: Optional[bool] = None, compress: bool = False, ca ```python from aws_lambda_powertools import Tracer - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/get-call") def simple_get(): @@ -277,10 +277,10 @@ def post(self, rule: str, cors: Optional[bool] = None, compress: bool = False, c ```python from aws_lambda_powertools import Tracer - from 
aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.post("/post-call") def simple_post(): @@ -303,10 +303,10 @@ def put(self, rule: str, cors: Optional[bool] = None, compress: bool = False, ca ```python from aws_lambda_powertools import Tracer - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.put("/put-call") def simple_put(): @@ -331,10 +331,10 @@ def delete( ```python from aws_lambda_powertools import Tracer - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.delete("/delete-call") def simple_delete(): @@ -358,10 +358,10 @@ def patch( ```python from aws_lambda_powertools import Tracer - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.patch("/patch-call") def simple_patch(): @@ -387,10 +387,10 @@ class ApiGatewayResolver(BaseRouter): ```python from aws_lambda_powertools import Tracer - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/get-call") def simple_get(): @@ -446,10 +446,6 @@ def __init__( # Allow for a custom serializer or a concise json serialization self._serializer = serializer or partial(json.dumps, separators=(",", ":"), cls=Encoder) - if self._debug: - # Always does a pretty print when in debug mode - self._serializer = partial(json.dumps, indent=4, cls=Encoder) - def route( self, rule: str, @@ -496,7 +492,7 @@ def resolve(self, event, context) -> Dict[str, Any]: Returns the dict response """ if self._debug: - print(self._json_dump(event)) + print(self._json_dump(event), end="") BaseRouter.current_event = self._to_proxy_event(event) BaseRouter.lambda_context = context return self._resolve().build(self.current_event, self._cors) @@ -555,7 +551,7 @@ def _resolve(self) -> ResponseBuilder: for route in self._routes: if method != route.method: continue - match_results: Optional[re.Match] = route.rule.match(path) + match_results: Optional[Match] = route.rule.match(path) if match_results: logger.debug("Found a registered route. 
Calling function") return self._call_route(route, match_results.groupdict()) # pass fn args @@ -735,3 +731,45 @@ def register_route(func: Callable): self._routes[(rule, methods, cors, compress, cache_control)] = func return register_route + + +class APIGatewayRestResolver(ApiGatewayResolver): + current_event: APIGatewayProxyEvent + + def __init__( + self, + cors: Optional[CORSConfig] = None, + debug: Optional[bool] = None, + serializer: Optional[Callable[[Dict], str]] = None, + strip_prefixes: Optional[List[str]] = None, + ): + """Amazon API Gateway REST and HTTP API v1 payload resolver""" + super().__init__(ProxyEventType.APIGatewayProxyEvent, cors, debug, serializer, strip_prefixes) + + +class APIGatewayHttpResolver(ApiGatewayResolver): + current_event: APIGatewayProxyEventV2 + + def __init__( + self, + cors: Optional[CORSConfig] = None, + debug: Optional[bool] = None, + serializer: Optional[Callable[[Dict], str]] = None, + strip_prefixes: Optional[List[str]] = None, + ): + """Amazon API Gateway HTTP API v2 payload resolver""" + super().__init__(ProxyEventType.APIGatewayProxyEventV2, cors, debug, serializer, strip_prefixes) + + +class ALBResolver(ApiGatewayResolver): + current_event: ALBEvent + + def __init__( + self, + cors: Optional[CORSConfig] = None, + debug: Optional[bool] = None, + serializer: Optional[Callable[[Dict], str]] = None, + strip_prefixes: Optional[List[str]] = None, + ): + """Amazon Application Load Balancer (ALB) resolver""" + super().__init__(ProxyEventType.ALBEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 938742fb0a3..49321181b48 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -349,7 +349,7 @@ def decorate(event, context, **kwargs): if log_event: logger.debug("Event received") - self.info(event) + self.info(getattr(event, "raw_event", event)) return lambda_handler(event, context) diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py index f0e39ddf8f0..41c2f2927b0 100644 --- a/aws_lambda_powertools/logging/utils.py +++ b/aws_lambda_powertools/logging/utils.py @@ -38,7 +38,7 @@ def copy_config_to_registered_loggers( if exclude: exclude.add(source_logger.name) else: - exclude = set(source_logger.name) + exclude = {source_logger.name} # Prepare loggers set if include: diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 927b6873648..00e083d4a7f 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -53,7 +53,7 @@ def lambda_handler(): ---------- service : str, optional service name to be used as metric dimension, by default "service_undefined" - namespace : str + namespace : str, optional Namespace for metrics Raises @@ -209,5 +209,6 @@ def __add_cold_start_metric(self, context: Any) -> None: logger.debug("Adding cold start metric and function_name dimension") with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric: metric.add_dimension(name="function_name", value=context.function_name) - metric.add_dimension(name="service", value=self.service) + if self.service: + metric.add_dimension(name="service", value=str(self.service)) is_cold_start = False diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index 11c4e4ce77c..37621f8274a 100644 --- a/aws_lambda_powertools/shared/functions.py +++ 
b/aws_lambda_powertools/shared/functions.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, Union +from typing import Optional, Union def strtobool(value: str) -> bool: @@ -38,21 +38,23 @@ def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bo return choice if choice is not None else strtobool(env) -def resolve_env_var_choice(env: Any, choice: Optional[Any] = None) -> Union[bool, Any]: +def resolve_env_var_choice( + env: Optional[str] = None, choice: Optional[Union[str, float]] = None +) -> Optional[Union[str, float]]: """Pick explicit choice over env, if available, otherwise return env value received NOTE: Environment variable should be resolved by the caller. Parameters ---------- - env : Any + env : str, Optional environment variable actual value - choice : bool + choice : str|float, optional explicit choice Returns ------- - choice : str + choice : str, Optional resolved choice as either bool or environment value """ return choice if choice is not None else env diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py index dc4ca300c7c..d90c25f12bc 100644 --- a/aws_lambda_powertools/utilities/batch/exceptions.py +++ b/aws_lambda_powertools/utilities/batch/exceptions.py @@ -5,7 +5,7 @@ from types import TracebackType from typing import List, Optional, Tuple, Type -ExceptionInfo = Tuple[Type[BaseException], BaseException, TracebackType] +ExceptionInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] class BaseBatchProcessingError(Exception): diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py index 38773a399dd..ee6a960c129 100644 --- a/aws_lambda_powertools/utilities/batch/sqs.py +++ b/aws_lambda_powertools/utilities/batch/sqs.py @@ -4,12 +4,16 @@ Batch SQS utilities """ import logging +import math import sys -from typing import Callable, Dict, List, Optional, Tuple +from concurrent.futures import ThreadPoolExecutor, as_completed +from typing import Any, Callable, Dict, List, Optional, Tuple, cast import boto3 from botocore.config import Config +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord + from ...middleware_factory import lambda_handler_decorator from .base import BasePartialProcessor from .exceptions import SQSBatchProcessingError @@ -71,6 +75,7 @@ def __init__( session = boto3_session or boto3.session.Session() self.client = session.client("sqs", config=config) self.suppress_exception = suppress_exception + self.max_message_batch = 10 super().__init__() @@ -84,11 +89,17 @@ def _get_queue_url(self) -> Optional[str]: *_, account_id, queue_name = self.records[0]["eventSourceARN"].split(":") return f"{self.client._endpoint.host}/{account_id}/{queue_name}" - def _get_entries_to_clean(self) -> List: + def _get_entries_to_clean(self) -> List[Dict[str, str]]: """ Format messages to use in batch deletion """ - return [{"Id": msg["messageId"], "ReceiptHandle": msg["receiptHandle"]} for msg in self.success_messages] + entries = [] + # success_messages has generic type of union of SQS, Dynamodb and Kinesis Streams records or Pydantic models. 
+ # Here we get SQS Record only + messages = cast(List[SQSRecord], self.success_messages) + for msg in messages: + entries.append({"Id": msg["messageId"], "ReceiptHandle": msg["receiptHandle"]}) + return entries def _process_record(self, record) -> Tuple: """ @@ -112,23 +123,39 @@ def _prepare(self): self.success_messages.clear() self.fail_messages.clear() - def _clean(self): + def _clean(self) -> Optional[List]: """ Delete messages from Queue in case of partial failure. """ + # If all messages were successful, fall back to the default SQS - - # Lambda behaviour which deletes messages if Lambda responds successfully + # Lambda behavior which deletes messages if Lambda responds successfully if not self.fail_messages: logger.debug(f"All {len(self.success_messages)} records successfully processed") - return + return None queue_url = self._get_queue_url() entries_to_remove = self._get_entries_to_clean() + # Batch delete up to 10 messages at a time (SQS limit) + max_workers = math.ceil(len(entries_to_remove) / self.max_message_batch) - delete_message_response = None if entries_to_remove: - delete_message_response = self.client.delete_message_batch(QueueUrl=queue_url, Entries=entries_to_remove) - + with ThreadPoolExecutor(max_workers=max_workers) as executor: + futures, results = [], [] + while entries_to_remove: + futures.append( + executor.submit( + self._delete_messages, queue_url, entries_to_remove[: self.max_message_batch], self.client + ) + ) + entries_to_remove = entries_to_remove[self.max_message_batch :] + for future in as_completed(futures): + try: + logger.debug("Deleted batch of processed messages from SQS") + results.append(future.result()) + except Exception: + logger.exception("Couldn't remove batch of processed messages from SQS") + raise if self.suppress_exception: logger.debug(f"{len(self.fail_messages)} records failed processing, but exceptions are suppressed") else: @@ -139,6 +166,13 @@ def _clean(self): child_exceptions=self.exceptions, ) + return results + + def _delete_messages(self, queue_url: str, entries_to_remove: List, sqs_client: Any): + delete_message_response = sqs_client.delete_message_batch( + QueueUrl=queue_url, + Entries=entries_to_remove, + ) return delete_message_response diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py index 42b8052fd32..4a7d8e71e1d 100644 --- a/aws_lambda_powertools/utilities/idempotency/idempotency.py +++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py @@ -112,7 +112,7 @@ def process_order(customer_id: str, order: dict, **kwargs): return {"StatusCode": 200} """ - if function is None: + if not function: return cast( AnyCallableT, functools.partial( @@ -132,7 +132,7 @@ def decorate(*args, **kwargs): payload = kwargs.get(data_keyword_argument) - if payload is None: + if not payload: raise RuntimeError( f"Unable to extract '{data_keyword_argument}' from keyword arguments." 
f" Ensure this exists in your function's signature as well as the caller used it as a keyword argument" diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index b07662e6432..e6ffea10de8 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -92,16 +92,16 @@ def status(self) -> str: else: raise IdempotencyInvalidStatusError(self._status) - def response_json_as_dict(self) -> dict: + def response_json_as_dict(self) -> Optional[dict]: """ Get response data deserialized to python dict Returns ------- - dict + Optional[dict] previous response data deserialized """ - return json.loads(self.response_data) + return json.loads(self.response_data) if self.response_data else None class BasePersistenceLayer(ABC): @@ -121,7 +121,6 @@ def __init__(self): self.raise_on_no_idempotency_key = False self.expires_after_seconds: int = 60 * 60 # 1 hour default self.use_local_cache = False - self._cache: Optional[LRUDict] = None self.hash_function = None def configure(self, config: IdempotencyConfig, function_name: Optional[str] = None) -> None: diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index b059a3b2483..7e8588eb895 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -44,7 +44,7 @@ def get( transform: Optional[str] = None, force_fetch: bool = False, **sdk_options, - ) -> Union[str, list, dict, bytes]: + ) -> Optional[Union[str, dict, bytes]]: """ Retrieve a parameter value or return the cached value @@ -81,6 +81,7 @@ def get( # of supported transform is small and the probability that a given # parameter will always be used in a specific transform, this should be # an acceptable tradeoff. + value: Optional[Union[str, bytes, dict]] = None key = (name, transform) if not force_fetch and self._has_not_expired(key): @@ -92,7 +93,7 @@ def get( except Exception as exc: raise GetParameterError(str(exc)) - if transform is not None: + if transform: if isinstance(value, bytes): value = value.decode("utf-8") value = transform_value(value, transform) @@ -146,26 +147,25 @@ def get_multiple( TransformParameterError When the parameter provider fails to transform a parameter value. 
""" - key = (path, transform) if not force_fetch and self._has_not_expired(key): return self.store[key].value try: - values: Dict[str, Union[str, bytes, dict, None]] = self._get_multiple(path, **sdk_options) + values = self._get_multiple(path, **sdk_options) # Encapsulate all errors into a generic GetParameterError except Exception as exc: raise GetParameterError(str(exc)) - if transform is not None: - for (key, value) in values.items(): - _transform = get_transform_method(key, transform) - if _transform is None: + if transform: + transformed_values: dict = {} + for (item, value) in values.items(): + _transform = get_transform_method(item, transform) + if not _transform: continue - - values[key] = transform_value(value, _transform, raise_on_transform_error) - + transformed_values[item] = transform_value(value, _transform, raise_on_transform_error) + values.update(transformed_values) self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age)) return values @@ -217,7 +217,9 @@ def get_transform_method(key: str, transform: Optional[str] = None) -> Optional[ return None -def transform_value(value: str, transform: str, raise_on_transform_error: bool = True) -> Union[dict, bytes, None]: +def transform_value( + value: str, transform: str, raise_on_transform_error: Optional[bool] = True +) -> Optional[Union[dict, bytes]]: """ Apply a transform to a value diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py index 4cbb16354c7..fd55e40a95f 100644 --- a/aws_lambda_powertools/utilities/parameters/ssm.py +++ b/aws_lambda_powertools/utilities/parameters/ssm.py @@ -87,7 +87,9 @@ def __init__(self, config: Optional[Config] = None, boto3_session: Optional[boto super().__init__() - def get( + # We break Liskov substitution principle due to differences in signatures of this method and superclass get method + # We ignore mypy error, as changes to the signature here or in a superclass is a breaking change to users + def get( # type: ignore[override] self, name: str, max_age: int = DEFAULT_MAX_AGE_SECS, @@ -95,7 +97,7 @@ def get( decrypt: bool = False, force_fetch: bool = False, **sdk_options - ) -> Union[str, list, dict, bytes]: + ) -> Optional[Union[str, dict, bytes]]: """ Retrieve a parameter value or return the cached value diff --git a/aws_lambda_powertools/utilities/parser/envelopes/apigw.py b/aws_lambda_powertools/utilities/parser/envelopes/apigw.py index 6b74a3037e9..a9af93e9b9c 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/apigw.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/apigw.py @@ -27,6 +27,6 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) Parsed detail payload with model provided """ logger.debug(f"Parsing incoming data with Api Gateway model {APIGatewayProxyEventModel}") - parsed_envelope = APIGatewayProxyEventModel.parse_obj(data) + parsed_envelope: APIGatewayProxyEventModel = APIGatewayProxyEventModel.parse_obj(data) logger.debug(f"Parsing event payload in `detail` with {model}") return self._parse(data=parsed_envelope.body, model=model) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py index a627e4da0e5..336645a2b73 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py @@ -27,6 +27,6 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) Parsed detail 
payload with model provided """ logger.debug(f"Parsing incoming data with Api Gateway model V2 {APIGatewayProxyEventV2Model}") - parsed_envelope = APIGatewayProxyEventV2Model.parse_obj(data) + parsed_envelope: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model.parse_obj(data) logger.debug(f"Parsing event payload in `detail` with {model}") return self._parse(data=parsed_envelope.body, model=model) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py b/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py index ad1df09a65e..239bfd72025 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py @@ -27,6 +27,6 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) Parsed detail payload with model provided """ logger.debug(f"Parsing incoming data with EventBridge model {EventBridgeModel}") - parsed_envelope = EventBridgeModel.parse_obj(data) + parsed_envelope: EventBridgeModel = EventBridgeModel.parse_obj(data) logger.debug(f"Parsing event payload in `detail` with {model}") return self._parse(data=parsed_envelope.detail, model=model) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py index 9db8e4450f2..9ff221a7b7b 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Dict, List, Optional, Type, Union, cast from ..models import KinesisDataStreamModel from ..types import Model @@ -37,6 +37,9 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) logger.debug(f"Parsing incoming data with Kinesis model {KinesisDataStreamModel}") parsed_envelope: KinesisDataStreamModel = KinesisDataStreamModel.parse_obj(data) logger.debug(f"Parsing Kinesis records in `body` with {model}") - return [ - self._parse(data=record.kinesis.data.decode("utf-8"), model=model) for record in parsed_envelope.Records - ] + models = [] + for record in parsed_envelope.Records: + # We allow either AWS expected contract (bytes) or a custom Model, see #943 + data = cast(bytes, record.kinesis.data) + models.append(self._parse(data=data.decode("utf-8"), model=model)) + return models diff --git a/aws_lambda_powertools/utilities/parser/envelopes/sns.py b/aws_lambda_powertools/utilities/parser/envelopes/sns.py index d4fa7c2f663..50b9d406c23 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/sns.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/sns.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Dict, List, Optional, Type, Union, cast from ..models import SnsModel, SnsNotificationModel, SqsModel from ..types import Model @@ -69,6 +69,8 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) parsed_envelope = SqsModel.parse_obj(data) output = [] for record in parsed_envelope.Records: - sns_notification = SnsNotificationModel.parse_raw(record.body) + # We allow either AWS expected contract (str) or a custom Model, see #943 + body = cast(str, record.body) + sns_notification = SnsNotificationModel.parse_raw(body) output.append(self._parse(data=sns_notification.Message, model=model)) return output diff --git a/aws_lambda_powertools/utilities/parser/models/alb.py 
b/aws_lambda_powertools/utilities/parser/models/alb.py index 1112d0c04e4..d903e9f0fd8 100644 --- a/aws_lambda_powertools/utilities/parser/models/alb.py +++ b/aws_lambda_powertools/utilities/parser/models/alb.py @@ -1,9 +1,7 @@ -from typing import Dict, Union +from typing import Dict, Type, Union from pydantic import BaseModel -from aws_lambda_powertools.utilities.parser.types import Model - class AlbRequestContextData(BaseModel): targetGroupArn: str @@ -16,7 +14,7 @@ class AlbRequestContext(BaseModel): class AlbModel(BaseModel): httpMethod: str path: str - body: Union[str, Model] + body: Union[str, Type[BaseModel]] isBase64Encoded: bool headers: Dict[str, str] queryStringParameters: Dict[str, str] diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index ce519b8e0e3..78b40cd2c0c 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -1,10 +1,10 @@ from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Type, Union from pydantic import BaseModel, root_validator from pydantic.networks import IPvAnyNetwork -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal class ApiGatewayUserCertValidity(BaseModel): @@ -89,4 +89,4 @@ class APIGatewayProxyEventModel(BaseModel): pathParameters: Optional[Dict[str, str]] stageVariables: Optional[Dict[str, str]] isBase64Encoded: bool - body: Optional[Union[str, Model]] + body: Optional[Union[str, Type[BaseModel]]] diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index ddaf2d7ef82..f97dad3bcb0 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -1,10 +1,10 @@ from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Type, Union from pydantic import BaseModel, Field from pydantic.networks import IPvAnyNetwork -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal class RequestContextV2AuthorizerIamCognito(BaseModel): @@ -67,5 +67,5 @@ class APIGatewayProxyEventV2Model(BaseModel): pathParameters: Optional[Dict[str, str]] stageVariables: Optional[Dict[str, str]] requestContext: RequestContextV2 - body: Optional[Union[str, Model]] + body: Optional[Union[str, Type[BaseModel]]] isBase64Encoded: bool diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index 9b954ec3b13..71e560276a4 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -3,19 +3,17 @@ import logging import zlib from datetime import datetime -from typing import List, Union +from typing import List, Type, Union from pydantic import BaseModel, Field, validator -from aws_lambda_powertools.utilities.parser.types import Model - logger = logging.getLogger(__name__) class CloudWatchLogsLogEvent(BaseModel): id: str # noqa AA03 VNE003 timestamp: datetime - message: Union[str, Model] + message: Union[str, Type[BaseModel]] class CloudWatchLogsDecode(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/dynamodb.py 
b/aws_lambda_powertools/utilities/parser/models/dynamodb.py index fe7514bada0..772b8fb580f 100644 --- a/aws_lambda_powertools/utilities/parser/models/dynamodb.py +++ b/aws_lambda_powertools/utilities/parser/models/dynamodb.py @@ -1,16 +1,16 @@ from datetime import date -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Type, Union from pydantic import BaseModel -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal class DynamoDBStreamChangedRecordModel(BaseModel): ApproximateCreationDateTime: Optional[date] Keys: Dict[str, Dict[str, Any]] - NewImage: Optional[Union[Dict[str, Any], Model]] - OldImage: Optional[Union[Dict[str, Any], Model]] + NewImage: Optional[Union[Dict[str, Any], Type[BaseModel]]] + OldImage: Optional[Union[Dict[str, Any], Type[BaseModel]]] SequenceNumber: str SizeBytes: int StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] diff --git a/aws_lambda_powertools/utilities/parser/models/event_bridge.py b/aws_lambda_powertools/utilities/parser/models/event_bridge.py index f98a263c680..68359f867bd 100644 --- a/aws_lambda_powertools/utilities/parser/models/event_bridge.py +++ b/aws_lambda_powertools/utilities/parser/models/event_bridge.py @@ -1,10 +1,8 @@ from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Type, Union from pydantic import BaseModel, Field -from aws_lambda_powertools.utilities.parser.types import Model - class EventBridgeModel(BaseModel): version: str @@ -15,5 +13,5 @@ class EventBridgeModel(BaseModel): region: str resources: List[str] detail_type: str = Field(None, alias="detail-type") - detail: Union[Dict[str, Any], Model] + detail: Union[Dict[str, Any], Type[BaseModel]] replay_name: Optional[str] = Field(None, alias="replay-name") diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis.py b/aws_lambda_powertools/utilities/parser/models/kinesis.py index 1c7c31c97b4..be868ca44ba 100644 --- a/aws_lambda_powertools/utilities/parser/models/kinesis.py +++ b/aws_lambda_powertools/utilities/parser/models/kinesis.py @@ -1,11 +1,11 @@ import base64 import logging from binascii import Error as BinAsciiError -from typing import List, Union +from typing import List, Type, Union from pydantic import BaseModel, validator -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal logger = logging.getLogger(__name__) @@ -14,7 +14,7 @@ class KinesisDataStreamRecordPayload(BaseModel): kinesisSchemaVersion: str partitionKey: str sequenceNumber: str - data: Union[bytes, Model] # base64 encoded str is parsed into bytes + data: Union[bytes, Type[BaseModel]] # base64 encoded str is parsed into bytes approximateArrivalTimestamp: float @validator("data", pre=True, allow_reuse=True) diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py index 778786bc8cb..ef59e9c2f98 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py +++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py @@ -1,9 +1,7 @@ -from typing import Dict, Optional, Union +from typing import Dict, Optional, Type, Union from pydantic import BaseModel, HttpUrl -from aws_lambda_powertools.utilities.parser.types import Model - class S3ObjectContext(BaseModel): inputS3Url: 
HttpUrl @@ -14,7 +12,7 @@ class S3ObjectConfiguration(BaseModel): accessPointArn: str supportingAccessPointArn: str - payload: Union[str, Model] + payload: Union[str, Type[BaseModel]] class S3ObjectUserRequest(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/sns.py b/aws_lambda_powertools/utilities/parser/models/sns.py index cdcd9549a98..e329162e5c8 100644 --- a/aws_lambda_powertools/utilities/parser/models/sns.py +++ b/aws_lambda_powertools/utilities/parser/models/sns.py @@ -1,10 +1,12 @@ from datetime import datetime -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional +from typing import Type as TypingType +from typing import Union from pydantic import BaseModel, root_validator from pydantic.networks import HttpUrl -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal class SnsMsgAttributeModel(BaseModel): @@ -18,7 +20,7 @@ class SnsNotificationModel(BaseModel): UnsubscribeUrl: HttpUrl Type: Literal["Notification"] MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]] - Message: Union[str, Model] + Message: Union[str, TypingType[BaseModel]] MessageId: str SigningCertUrl: HttpUrl Signature: str diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index 47871ab8840..1d56c4f8e34 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -1,9 +1,9 @@ from datetime import datetime -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional, Type, Union from pydantic import BaseModel -from aws_lambda_powertools.utilities.parser.types import Literal, Model +from aws_lambda_powertools.utilities.parser.types import Literal class SqsAttributesModel(BaseModel): @@ -52,7 +52,7 @@ class SqsMsgAttributeModel(BaseModel): class SqsRecordModel(BaseModel): messageId: str receiptHandle: str - body: Union[str, Model] + body: Union[str, Type[BaseModel]] attributes: SqsAttributesModel messageAttributes: Dict[str, SqsMsgAttributeModel] md5OfBody: str diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index ae0960875c2..cc01b66600d 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -3,7 +3,7 @@ title: REST API description: Core utility --- -Event handler for Amazon API Gateway REST/HTTP APIs and Application Loader Balancer (ALB). +Event handler for Amazon API Gateway REST and HTTP APIs, and Application Load Balancer (ALB). ### Key Features @@ -12,7 +12,7 @@ Event handler for Amazon API Gateway REST and HTTP APIs, and Application Load Balan * Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to easily access event and identity information * Built-in support for Decimals JSON encoding * Support for dynamic path expressions -* Router to allow for splitting up the handler accross multiple files +* Router to allow for splitting up the handler across multiple files ## Getting started @@ -63,25 +63,36 @@ Resources: Method: ANY # Send requests using any http method to the lambda function ``` -### API Gateway decorator +### Event Resolvers -You can define your functions to match a path and HTTP method, when you use the decorator `ApiGatewayResolver`. 
+Before you decorate your functions to handle a given path and HTTP method(s), you need to initialize a resolver. -Here's an example where we have two separate functions to resolve two paths: `/hello`. +A resolver will handle request resolution, include [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. + +For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, and `ALBResolver`. + +???+ info + We will use `APIGatewayRestResolver` as the default across examples. + +#### API Gateway REST API + +When using Amazon API Gateway REST API to front your Lambda functions, you can use `APIGatewayRestResolver`. + +Here's an example on how we can handle the `/hello` path. ???+ info - We automatically serialize `Dict` responses as JSON, trim whitespaces for compact responses, and set content-type to `application/json`. + We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. === "app.py" ```python hl_lines="3 7 9 12 18" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver() # by default API Gateway REST API (v1) + app = APIGatewayRestResolver() @app.get("/hello") @tracer.capture_method @@ -106,32 +117,32 @@ Here's an example where we have two separate functions to resolve two paths: `/h "httpMethod": "GET", "isBase64Encoded": false, "queryStringParameters": { - "foo": "bar" + "foo": "bar" }, "multiValueQueryStringParameters": {}, "pathParameters": { - "hello": "/hello" + "hello": "/hello" }, "stageVariables": {}, "headers": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Encoding": "gzip, deflate, sdch", - "Accept-Language": "en-US,en;q=0.8", - "Cache-Control": "max-age=0", - "CloudFront-Forwarded-Proto": "https", - "CloudFront-Is-Desktop-Viewer": "true", - "CloudFront-Is-Mobile-Viewer": "false", - "CloudFront-Is-SmartTV-Viewer": "false", - "CloudFront-Is-Tablet-Viewer": "false", - "CloudFront-Viewer-Country": "US", - "Host": "1234567890.execute-api.us-east-1.amazonaws.com", - "Upgrade-Insecure-Requests": "1", - "User-Agent": "Custom User Agent String", - "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", - "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", - "X-Forwarded-For": "127.0.0.1, 127.0.0.2", - "X-Forwarded-Port": "443", - "X-Forwarded-Proto": "https" + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + 
"X-Forwarded-Proto": "https" }, "multiValueHeaders": {}, "requestContext": { @@ -176,18 +187,23 @@ Here's an example where we have two separate functions to resolve two paths: `/h } ``` -#### HTTP API +#### API Gateway HTTP API + +When using Amazon API Gateway HTTP API to front your Lambda functions, you can use `APIGatewayHttpResolver`. -When using API Gateway HTTP API to front your Lambda functions, you can instruct `ApiGatewayResolver` to conform with their contract via `proxy_type` param: +???+ note + Using HTTP API v1 payload? Use `APIGatewayRestResolver` instead. `APIGatewayHttpResolver` defaults to v2 payload. + +Here's an example on how we can handle the `/hello` path. ```python hl_lines="3 7" title="Using HTTP API resolver" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, ProxyEventType +from aws_lambda_powertools.event_handler import APIGatewayHttpResolver tracer = Tracer() logger = Logger() -app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEventV2) +app = APIGatewayHttpResolver() @app.get("/hello") @tracer.capture_method @@ -201,18 +217,18 @@ def lambda_handler(event, context): return app.resolve(event, context) ``` -#### ALB +#### Application Load Balancer -When using ALB to front your Lambda functions, you can instruct `ApiGatewayResolver` to conform with their contract via `proxy_type` param: +When using Amazon Application Load Balancer to front your Lambda functions, you can use `ALBResolver`. ```python hl_lines="3 7" title="Using ALB resolver" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, ProxyEventType +from aws_lambda_powertools.event_handler import ALBResolver tracer = Tracer() logger = Logger() -app = ApiGatewayResolver(proxy_type=ProxyEventType.ALBEvent) +app = ALBResolver() @app.get("/hello") @tracer.capture_method @@ -235,11 +251,11 @@ You can use `/path/{dynamic_value}` when configuring dynamic URL paths. This all ```python hl_lines="9 11" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello/") @tracer.capture_method @@ -273,16 +289,16 @@ You can also nest paths as configured earlier in [our sample infrastructure](#re ```python hl_lines="9 11" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("//") @tracer.capture_method def get_message(message, name): - return {"message": f"{message}, {name}}"} + return {"message": f"{message}, {name}"} # You can continue to use other utilities just as before @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) @@ -317,9 +333,9 @@ You can also combine nested paths with greedy regex to catch in between routes. 
=== "app.py" ```python hl_lines="5" - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get(".+") def catch_any_route_after_any(): @@ -348,11 +364,11 @@ You can use named decorators to specify the HTTP method that should be handled i ```python hl_lines="9-10" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() # Only POST HTTP requests to the path /hello will route to this function @app.post("/hello") @@ -387,11 +403,11 @@ HTTP methods. ```python hl_lines="9-10" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver() + app = APIGatewayRestResolver() # PUT and POST HTTP requests to the path /hello will route to this function @app.route("/hello", method=["PUT", "POST"]) @@ -425,7 +441,7 @@ HTTP methods. By integrating with [Data classes utilities](../../utilities/data_classes.md){target="_blank"}, you have access to request details, Lambda context and also some convenient methods. -These are made available in the response returned when instantiating `ApiGatewayResolver`, for example `app.current_event` and `app.lambda_context`. +These are made available in the response returned when instantiating `APIGatewayRestResolver`, for example `app.current_event` and `app.lambda_context`. #### Query strings and payload @@ -434,9 +450,9 @@ Within `app.current_event` property, you can access query strings as dictionary You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property. ```python hl_lines="7-9 11" title="Accessing query strings, JSON payload, and raw payload" -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.get("/hello") def get_hello_you(): @@ -445,7 +461,7 @@ def get_hello_you(): payload = app.current_event.body name = app.current_event.get_query_string_value(name="name", default_value="") - return {"message": f"hello {name}}"} + return {"message": f"hello {name}"} def lambda_handler(event, context): return app.resolve(event, context) @@ -456,16 +472,16 @@ def lambda_handler(event, context): Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`. 
```python hl_lines="7-8" title="Accessing HTTP Headers" -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.get("/hello") def get_hello_you(): headers_as_dict = app.current_event.headers name = app.current_event.get_header_value(name="X-Name", default_value="") - return {"message": f"hello {name}}"} + return {"message": f"hello {name}"} def lambda_handler(event, context): return app.resolve(event, context) @@ -482,12 +498,12 @@ You can use **`not_found`** decorator to override this behaviour, and return a c from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response +from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response from aws_lambda_powertools.event_handler.exceptions import NotFoundError tracer = Tracer() logger = Logger() -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.not_found @tracer.capture_method @@ -521,11 +537,11 @@ You can use **`exception_handler`** decorator with any Python exception. This al from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response tracer = Tracer() logger = Logger() -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.exception_handler(ValueError) def handle_value_error(ex: ValueError): @@ -553,7 +569,6 @@ def lambda_handler(event, context): return app.resolve(event, context) ``` - ### Raising HTTP errors You can easily raise any HTTP Error back to the client using `ServiceError` exception. @@ -566,7 +581,7 @@ Additionally, we provide pre-defined errors for the most popular ones such as HT ```python hl_lines="4-10 20 25 30 35 39" title="Raising common HTTP Status errors (4xx, 5xx)" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.event_handler.exceptions import ( BadRequestError, InternalServerError, @@ -578,7 +593,7 @@ from aws_lambda_powertools.event_handler.exceptions import ( tracer = Tracer() logger = Logger() -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.get(rule="/bad-request-error") def bad_request_error(): @@ -613,7 +628,7 @@ def handler(event, context): ### Custom Domain API Mappings -When using Custom Domain API Mappings feature, you must use **`strip_prefixes`** param in the `ApiGatewayResolver` constructor. +When using Custom Domain API Mappings feature, you must use **`strip_prefixes`** param in the `APIGatewayRestResolver` constructor. Scenario: You have a custom domain `api.mydomain.dev` and set an API Mapping `payment` to forward requests to your Payments API, the path argument will be `/payment/`. @@ -624,11 +639,11 @@ This will lead to a HTTP 404 despite having your Lambda configured correctly. 
Se ```python hl_lines="7" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver tracer = Tracer() logger = Logger() - app = ApiGatewayResolver(strip_prefixes=["/payment"]) + app = APIGatewayRestResolver(strip_prefixes=["/payment"]) @app.get("/subscriptions/") @tracer.capture_method @@ -661,7 +676,7 @@ This will lead to a HTTP 404 despite having your Lambda configured correctly. Se ### CORS -You can configure CORS at the `ApiGatewayResolver` constructor via `cors` parameter using the `CORSConfig` class. +You can configure CORS at the `APIGatewayRestResolver` constructor via `cors` parameter using the `CORSConfig` class. This will ensure that CORS headers are always returned as part of the response when your functions match the path invoked. @@ -670,13 +685,13 @@ This will ensure that CORS headers are always returned as part of the response w ```python hl_lines="9 11" from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, CORSConfig + from aws_lambda_powertools.event_handler import APIGatewayRestResolver, CORSConfig tracer = Tracer() logger = Logger() cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) - app = ApiGatewayResolver(cors=cors_config) + app = APIGatewayRestResolver(cors=cors_config) @app.get("/hello/") @tracer.capture_method @@ -753,20 +768,22 @@ You can use the `Response` class to have full control over the response, for exa === "app.py" - ```python hl_lines="10-14" - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response + ```python hl_lines="11-16" + import json + from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello") def get_hello_you(): payload = json.dumps({"message": "I'm a teapot"}) custom_headers = {"X-Custom": "X-Value"} - return Response(status_code=418, - content_type="application/json", - body=payload, - headers=custom_headers + return Response( + status_code=418, + content_type="application/json", + body=payload, + headers=custom_headers, ) def lambda_handler(event, context): @@ -796,9 +813,9 @@ You can compress with gzip and base64 encode your responses via `compress` param === "app.py" ```python hl_lines="5 7" - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello", compress=True) def get_hello_you(): @@ -850,9 +867,9 @@ Like `compress` feature, the client must send the `Accept` header with the corre import os from pathlib import Path - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response + from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - app = ApiGatewayResolver() + app = APIGatewayRestResolver() logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() @app.get("/logo") @@ -940,10 +957,12 @@ This will enable full tracebacks errors in the response, print request and respo ???+ danger This might reveal sensitive information in your logs and relax CORS restrictions, use it 
sparingly.
+    It's best suited for local development only!
+
 ```python hl_lines="3" title="Enabling debug mode"
-from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver

-app = ApiGatewayResolver(debug=True)
+app = APIGatewayRestResolver(debug=True)

@app.get("/hello")
def get_hello_universe():
@@ -957,12 +976,14 @@ def lambda_handler(event, context):

You can instruct the API Gateway handler to use a custom serializer to best suit your needs, for example to take Enums into account when serializing.

-```python hl_lines="19-20 24" title="Using a custom JSON serializer for responses"
+```python hl_lines="21-22 26" title="Using a custom JSON serializer for responses"
import json
from enum import Enum
from json import JSONEncoder
from typing import Dict

+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+
class CustomEncoder(JSONEncoder):
    """Your custom JSON encoder"""
    def default(self, obj):
@@ -977,11 +998,11 @@ class CustomEncoder(JSONEncoder):
        return JSONEncoder.default(self, obj)

def custom_serializer(obj) -> str:
-    """Your custom serializer function ApiGatewayResolver will use"""
+    """Your custom serializer function APIGatewayRestResolver will use"""
    return json.dumps(obj, cls=CustomEncoder)

# Assigning your custom serializer
-app = ApiGatewayResolver(serializer=custom_serializer)
+app = APIGatewayRestResolver(serializer=custom_serializer)

class Color(Enum):
    RED = 1
@@ -1004,9 +1025,9 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in

=== "users.py"

-    We import **Router** instead of **ApiGatewayResolver**; syntax-wise, it is exactly the same.
+    We import **Router** instead of **APIGatewayRestResolver**; syntax-wise, it is exactly the same.

-    ```python hl_lines="4 8 12 15 21"
+    ```python hl_lines="5 8 12 15 21"
    import itertools
    from typing import Dict

@@ -1043,13 +1064,13 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in
    from typing import Dict

    from aws_lambda_powertools import Logger
-    from aws_lambda_powertools.event_handler import ApiGatewayResolver
+    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
    from aws_lambda_powertools.utilities.typing import LambdaContext

    import users

    logger = Logger()
-    app = ApiGatewayResolver()
+    app = APIGatewayRestResolver()

    app.include_router(users.router)

@@ -1068,12 +1089,12 @@ When necessary, you can set a prefix when including a router object. 
This means

    ```python hl_lines="9"
    from typing import Dict

-    from aws_lambda_powertools.event_handler import ApiGatewayResolver
+    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
    from aws_lambda_powertools.utilities.typing import LambdaContext

    import users

-    app = ApiGatewayResolver()
+    app = APIGatewayRestResolver()
    app.include_router(users.router, prefix="/users")  # prefix '/users' to any route in `users.router`

@@ -1202,12 +1223,11 @@ This sample project contains a Users function with two distinct set of routes, `

=== "src/users/main.py"

-    ```python hl_lines="9 15-16"
+    ```python hl_lines="8 14-15"
    from typing import Dict

    from aws_lambda_powertools import Logger, Tracer
-    from aws_lambda_powertools.event_handler import ApiGatewayResolver
-    from aws_lambda_powertools.event_handler.api_gateway import ProxyEventType
+    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
    from aws_lambda_powertools.logging.correlation_paths import APPLICATION_LOAD_BALANCER
    from aws_lambda_powertools.utilities.typing import LambdaContext

@@ -1215,7 +1235,7 @@ This sample project contains a Users function with two distinct set of routes, `
    tracer = Tracer()
    logger = Logger()
-    app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent)
+    app = APIGatewayRestResolver()

    app.include_router(health.router)
    app.include_router(users.router)

@@ -1338,7 +1358,7 @@ You can test your routes by passing a proxy event request where `path` and `http
    def test_lambda_handler(lambda_context):
        minimal_event = {
            "path": "/hello",
-            "httpMethod": "GET"
+            "httpMethod": "GET",
            "requestContext": {  # correlation ID
                "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef"
            }
@@ -1352,10 +1372,10 @@ You can test your routes by passing a proxy event request where `path` and `http
    ```python
    from aws_lambda_powertools import Logger
    from aws_lambda_powertools.logging import correlation_paths
-    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+    from aws_lambda_powertools.event_handler import APIGatewayRestResolver

    logger = Logger()
-    app = ApiGatewayResolver()  # by default API Gateway REST API (v1)
+    app = APIGatewayRestResolver()  # API Gateway REST API (v1)

    @app.get("/hello")
    def get_hello_universe():
@@ -1374,3 +1394,14 @@ You can test your routes by passing a proxy event request where `path` and `http

Chalice is a full-featured microframework that manages application and infrastructure. This utility, however, is largely focused on routing to reduce boilerplate and expects you to set up and manage infrastructure with your framework of choice.

That said, [Chalice has native integration with Lambda Powertools](https://aws.github.io/chalice/topics/middleware.html){target="_blank"} if you're looking for a more opinionated framework with a richer web feature set.
+
+**What happened to `ApiGatewayResolver`?**
+
+It's been superseded by more explicit resolvers like `APIGatewayRestResolver`, `APIGatewayHttpResolver`, and `ALBResolver`.
+
+`ApiGatewayResolver` handled multiple types of event resolvers for convenience via the `proxy_type` parameter. However,
+its late-bound resolution made it impossible for static type checkers like mypy, and for IDE IntelliSense, to know which properties `current_event` would have.
+
+This provided a suboptimal experience: customers could only discover the properties common to API Gateway REST, HTTP, and ALB events. While manually annotating `app.current_event` would work, it is not the experience we want to provide to customers.
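+For illustration, a minimal sketch (with a hypothetical `/ping` route) of what the explicit resolver unlocks:
+
+```python
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+
+app = APIGatewayRestResolver()
+
+@app.get("/ping")
+def ping():
+    # current_event is now statically typed as an API Gateway REST event,
+    # so mypy and IDE IntelliSense can resolve REST-specific properties
+    # such as `resource` without a manual annotation
+    return {"resource": app.current_event.resource}
+
+def lambda_handler(event, context):
+    return app.resolve(event, context)
+```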
+ +`ApiGatewayResolver` will be deprecated in v2 and have appropriate warnings as soon as we have a v2 draft. diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index a4d5f635886..19205289bfd 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -346,24 +346,24 @@ You can nest `app.resolver()` decorator multiple times when resolving fields wit === "nested_mappings.py" ```python hl_lines="4 8 10-12 18" - from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import AppSyncResolver - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() + tracer = Tracer(service="sample_resolver") + logger = Logger(service="sample_resolver") + app = AppSyncResolver() - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - return name + description + @app.resolver(field_name="listLocations") + @app.resolver(field_name="locations") + def get_locations(name: str, description: str = ""): + return name + description - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) ``` === "schema.graphql" @@ -396,7 +396,8 @@ You can nest `app.resolver()` decorator multiple times when resolving fields wit For Lambda Python3.8+ runtime, this utility supports async functions when you use in conjunction with `asyncio.run`. 
-```python hl_lines="4 8 10-12 20" title="Resolving GraphQL resolvers async" +```python hl_lines="5 9 11-13 21" title="Resolving GraphQL resolvers async" +import asyncio from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.logging import correlation_paths @@ -603,33 +604,34 @@ You can subclass `AppSyncResolverEvent` to bring your own set of methods to hand === "custom_model.py" - ```python hl_lines="11-14 19 26" - from aws_lambda_powertools import Logger, Tracer + ```python hl_lines="12-15 20 27" + from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import AppSyncResolver + from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() + tracer = Tracer(service="sample_resolver") + logger = Logger(service="sample_resolver") + app = AppSyncResolver() - class MyCustomModel(AppSyncResolverEvent): - @property - def country_viewer(self) -> str: - return self.request_headers.get("cloudfront-viewer-country") + class MyCustomModel(AppSyncResolverEvent): + @property + def country_viewer(self) -> str: + return self.request_headers.get("cloudfront-viewer-country") - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - if app.current_event.country_viewer == "US": - ... - return name + description + @app.resolver(field_name="listLocations") + @app.resolver(field_name="locations") + def get_locations(name: str, description: str = ""): + if app.current_event.country_viewer == "US": + ... + return name + description - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context, data_model=MyCustomModel) + @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context, data_model=MyCustomModel) ``` === "schema.graphql" @@ -775,27 +777,87 @@ You can test your resolvers by passing a mocked or actual AppSync Lambda event t You can use either `app.resolve(event, context)` or simply `app(event, context)`. -Here's an example from our internal functional test. 
+Here's an example of how you can test your synchronous resolvers: -=== "test_direct_resolver.py" +=== "test_resolver.py" ```python - def test_direct_resolver(): - # Check whether we can handle an example appsync direct resolver - # load_event primarily deserialize the JSON event into a dict - mock_event = load_event("appSyncDirectResolver.json") + import json + import pytest + from pathlib import Path - app = AppSyncResolver() + from src.index import app # import the instance of AppSyncResolver from your code - @app.resolver(field_name="createSomething") - def create_something(id: str): - assert app.lambda_context == {} - return id + def test_direct_resolver(): + # Load mock event from a file + json_file_path = Path("appSyncDirectResolver.json") + with open(json_file_path) as json_file: + mock_event = json.load(json_file) # Call the implicit handler result = app(mock_event, {}) - assert result == "my identifier" + assert result == "created this value" + ``` + +=== "src/index.py" + + ```python + + from aws_lambda_powertools.event_handler import AppSyncResolver + + app = AppSyncResolver() + + @app.resolver(field_name="createSomething") + def create_something(): + return "created this value" + + ``` + +=== "appSyncDirectResolver.json" + + ```json + --8<-- "tests/events/appSyncDirectResolver.json" + ``` + +And an example for testing asynchronous resolvers. Note that this requires the `pytest-asyncio` package: + +=== "test_async_resolver.py" + + ```python + import json + import pytest + from pathlib import Path + + from src.index import app # import the instance of AppSyncResolver from your code + + @pytest.mark.asyncio + async def test_direct_resolver(): + # Load mock event from a file + json_file_path = Path("appSyncDirectResolver.json") + with open(json_file_path) as json_file: + mock_event = json.load(json_file) + + # Call the implicit handler + result = await app(mock_event, {}) + + assert result == "created this value" + ``` + +=== "src/index.py" + + ```python + import asyncio + + from aws_lambda_powertools.event_handler import AppSyncResolver + + app = AppSyncResolver() + + @app.resolver(field_name="createSomething") + async def create_something_async(): + await asyncio.sleep(1) # Do async stuff + return "created this value" + ``` === "appSyncDirectResolver.json" diff --git a/docs/index.md b/docs/index.md index 84944b06561..9f2f9ffe55f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -24,7 +24,7 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:**](#){: .copyMe} +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:**](#){: .copyMe} * **PyPi**: **`pip install aws-lambda-powertools`** ### Lambda Layer @@ -37,23 +37,23 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN |--------------------------- | --------------------------- - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | 
`ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:8 :clipboard:](#){: .copyMe} + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} + | 
`sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:9 :clipboard:](#){: .copyMe} === "SAM" @@ -62,7 +62,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:8 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:9 ``` === "Serverless framework" @@ -72,7 +72,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:8 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:9 ``` === "CDK" @@ -88,7 +88,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:8" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:9" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -137,7 +137,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:8"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:9"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -156,7 +156,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:8 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:9 ❯ amplify push -y @@ -167,7 +167,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:8 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:9 ? Do you want to edit the local lambda function now? No ``` @@ -175,7 +175,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:8 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:9 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. 
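If you prefer to retrieve that URL programmatically, a minimal boto3 sketch (assuming valid AWS credentials and the layer version 9 shown above; replace the region with your own) mirrors the CLI call:

```python
import boto3

region = "eu-west-1"  # assumption: use your own region
layer_arn = f"arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:9"

lambda_client = boto3.client("lambda", region_name=region)
response = lambda_client.get_layer_version_by_arn(Arn=layer_arn)

# The pre-signed URL to download the layer archive is under Content.Location
print(response["Content"]["Location"])
```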
@@ -454,5 +454,7 @@ As a best practice, AWS Lambda Powertools module logging statements are suppress ```python title="Powertools debug mode example" from aws_lambda_powertools.logging.logger import set_package_logger -set_package_logger() +set_package_logger() # (1) ``` + +1. :information_source: this will configure our `aws_lambda_powertools` logger with debug. diff --git a/docs/overrides/assets/images/powertools_docs_thumbnail.png b/docs/overrides/assets/images/powertools_docs_thumbnail.png deleted file mode 100644 index e87b8f19873..00000000000 Binary files a/docs/overrides/assets/images/powertools_docs_thumbnail.png and /dev/null differ diff --git a/docs/overrides/main.html b/docs/overrides/main.html new file mode 100644 index 00000000000..0af326afb24 --- /dev/null +++ b/docs/overrides/main.html @@ -0,0 +1,8 @@ +{% extends "base.html" %} + +{% block outdated %} + You're not viewing the latest version. + + Click here to go to latest. + +{% endblock %} diff --git a/docs/tutorial/idempotency.md b/docs/tutorial/idempotency.md deleted file mode 100644 index 4b03b66abd4..00000000000 --- a/docs/tutorial/idempotency.md +++ /dev/null @@ -1,1009 +0,0 @@ ---- -title: Idempotency -description: Utility ---- - -The idempotency utility provides a simple solution to convert your Lambda functions into idempotent operations which -are safe to retry. - -## Terminology - -The property of idempotency means that an operation does not cause additional side effects if it is called more than -once with the same input parameters. - -**Idempotent operations will return the same result when they are called multiple -times with the same parameters**. This makes idempotent operations safe to retry. - -**Idempotency key** is a hash representation of either the entire event or a specific configured subset of the event, and invocation results are **JSON serialized** and stored in your persistence storage layer. - -## Key features - -* Prevent Lambda handler from executing more than once on the same event payload during a time window -* Ensure Lambda handler returns the same result when called with the same payload -* Select a subset of the event as the idempotency key using JMESPath expressions -* Set a time window in which records with the same payload should be considered duplicates - -## Getting started - -### Required resources - -Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. - -As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. - -**Default table configuration** - -If you're not [changing the default configuration for the DynamoDB persistence layer](#dynamodbpersistencelayer), this is the expected default configuration: - -Configuration | Value | Notes -------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- -Partition key | `id` | -TTL attribute name | `expiration` | This can only be configured after your table is created if you're using AWS Console - -???+ tip "Tip: You can share a single state table for all functions" - You can reuse the same DynamoDB table to store idempotency state. We add your `function_name` in addition to the idempotency key as a hash key. 
- -```yaml hl_lines="5-13 21-23" title="AWS Serverless Application Model (SAM) example" -Resources: - IdempotencyTable: - Type: AWS::DynamoDB::Table - Properties: - AttributeDefinitions: - - AttributeName: id - AttributeType: S - KeySchema: - - AttributeName: id - KeyType: HASH - TimeToLiveSpecification: - AttributeName: expiration - Enabled: true - BillingMode: PAY_PER_REQUEST - - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Runtime: python3.8 - ... - Policies: - - DynamoDBCrudPolicy: - TableName: !Ref IdempotencyTable -``` - -???+ warning "Warning: Large responses with DynamoDB persistence layer" - When using this utility with DynamoDB, your function's responses must be [smaller than 400KB](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html#limits-items). - - Larger items cannot be written to DynamoDB and will cause exceptions. - -???+ info "Info: DynamoDB" - Each function invocation will generally make 2 requests to DynamoDB. If the - result returned by your Lambda is less than 1kb, you can expect 2 WCUs per invocation. For retried invocations, you will - see 1WCU and 1RCU. Review the [DynamoDB pricing documentation](https://aws.amazon.com/dynamodb/pricing/) to - estimate the cost. - -### Idempotent decorator - -You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. - -=== "app.py" - - ```python hl_lines="1-3 5 7 14" - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - payment = create_subscription_payment( - user=event['user'], - product=event['product_id'] - ) - ... - return { - "payment_id": payment.id, - "message": "success", - "statusCode": 200, - } - ``` - -=== "Example event" - - ```json - { - "username": "xyz", - "product_id": "123456789" - } - ``` - -### Idempotent_function decorator - -Similar to [idempotent decorator](#idempotent-decorator), you can use `idempotent_function` decorator for any synchronous Python function. - -When using `idempotent_function`, you must tell us which keyword parameter in your function signature has the data we should use via **`data_keyword_argument`**. - -!!! info "We support JSON serializable data, [Python Dataclasses](https://docs.python.org/3.7/library/dataclasses.html){target="_blank"}, [Parser/Pydantic Models](parser.md){target="_blank"}, and our [Event Source Data Classes](./data_classes.md){target="_blank"}." - -???+ warning - Make sure to call your decorated function using keyword arguments - -=== "batch_sample.py" - - This example also demonstrates how you can integrate with [Batch utility](batch.md), so you can process each record in an idempotent manner. 
- - ```python hl_lines="4-5 16 21 29" - from aws_lambda_powertools.utilities.batch import (BatchProcessor, EventType, - batch_processor) - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - - - processor = BatchProcessor(event_type=EventType.SQS) - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="messageId", # see Choosing a payload subset section - use_local_cache=True, - ) - - - @idempotent_function(data_keyword_argument="record", config=config, persistence_store=dynamodb) - def record_handler(record: SQSRecord): - return {"message": record["body"]} - - - @idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) - def dummy(arg_one, arg_two, data: dict, **kwargs): - return {"data": data} - - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - # `data` parameter must be called as a keyword argument to work - dummy("hello", "universe", data="test") - return processor.response() - ``` - -=== "Batch event" - - ```json hl_lines="4" - { - "Records": [ - { - "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...", - "body": "Test message.", - "attributes": { - "ApproximateReceiveCount": "1", - "SentTimestamp": "1545082649183", - "SenderId": "AIDAIENQZJOLO23YVJ4VO", - "ApproximateFirstReceiveTimestamp": "1545082649185" - }, - "messageAttributes": { - "testAttr": { - "stringValue": "100", - "binaryValue": "base64Str", - "dataType": "Number" - } - }, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", - "awsRegion": "us-east-2" - } - ] - } - ``` - -=== "dataclass_sample.py" - - ```python hl_lines="3-4 23 32" - from dataclasses import dataclass - - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="order_id", # see Choosing a payload subset section - use_local_cache=True, - ) - - @dataclass - class OrderItem: - sku: str - description: str - - @dataclass - class Order: - item: OrderItem - order_id: int - - - @idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) - def process_order(order: Order): - return f"processed order {order.order_id}" - - - order_item = OrderItem(sku="fake", description="sample") - order = Order(item=order_item, order_id="fake-id") - - # `order` parameter must be called as a keyword argument to work - process_order(order=order) - ``` - -=== "parser_pydantic_sample.py" - - ```python hl_lines="1-2 22 31" - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - from aws_lambda_powertools.utilities.parser import BaseModel - - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="order_id", # see Choosing a payload subset section - use_local_cache=True, - ) - - - class OrderItem(BaseModel): - sku: str - description: str - - - class Order(BaseModel): - item: OrderItem - order_id: int - - - @idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) - 
def process_order(order: Order): - return f"processed order {order.order_id}" - - - order_item = OrderItem(sku="fake", description="sample") - order = Order(item=order_item, order_id="fake-id") - - # `order` parameter must be called as a keyword argument to work - process_order(order=order) - ``` - -### Choosing a payload subset for idempotency - -???+ tip "Tip: Dealing with always changing payloads" - When dealing with a more elaborate payload, where parts of the payload always change, you should use **`event_key_jmespath`** parameter. - -Use [`IdempotencyConfig`](#customizing-the-default-behavior) to instruct the idempotent decorator to only use a portion of your payload to verify whether a request is idempotent, and therefore it should not be retried. - -> **Payment scenario** - -In this example, we have a Lambda handler that creates a payment for a user subscribing to a product. We want to ensure that we don't accidentally charge our customer by subscribing them more than once. - -Imagine the function executes successfully, but the client never receives the response due to a connection issue. It is safe to retry in this instance, as the idempotent decorator will return a previously saved response. - -???+ warning "Warning: Idempotency for JSON payloads" - The payload extracted by the `event_key_jmespath` is treated as a string by default, so will be sensitive to differences in whitespace even when the JSON payload itself is identical. - - To alter this behaviour, we can use the [JMESPath built-in function](jmespath_functions.md#powertools_json-function) `powertools_json()` to treat the payload as a JSON object (dict) rather than a string. - -=== "payment.py" - - ```python hl_lines="2-4 10 12 15 20" - import json - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - # Treat everything under the "body" key - # in the event json object as our payload - config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - body = json.loads(event['body']) - payment = create_subscription_payment( - user=body['user'], - product=body['product_id'] - ) - ... - return { - "payment_id": payment.id, - "message": "success", - "statusCode": 200 - } - ``` - -=== "Example event" - - ```json hl_lines="28" - { - "version":"2.0", - "routeKey":"ANY /createpayment", - "rawPath":"/createpayment", - "rawQueryString":"", - "headers": { - "Header1": "value1", - "Header2": "value2" - }, - "requestContext":{ - "accountId":"123456789012", - "apiId":"api-id", - "domainName":"id.execute-api.us-east-1.amazonaws.com", - "domainPrefix":"id", - "http":{ - "method":"POST", - "path":"/createpayment", - "protocol":"HTTP/1.1", - "sourceIp":"ip", - "userAgent":"agent" - }, - "requestId":"id", - "routeKey":"ANY /createpayment", - "stage":"$default", - "time":"10/Feb/2021:13:40:43 +0000", - "timeEpoch":1612964443723 - }, - "body":"{\"user\":\"xyz\",\"product_id\":\"123456789\"}", - "isBase64Encoded":false - } - ``` - - -### Idempotency request flow - -This sequence diagram shows an example flow of what happens in the payment scenario: - -![Idempotent sequence](../media/idempotent_sequence.png) - -The client was successful in receiving the result after the retry. Since the Lambda handler was only executed once, our customer hasn't been charged twice. 
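As a minimal sketch of that flow (assuming a deployed `IdempotencyTable` and a hypothetical counter to make the side effect observable), a retried invocation with an identical payload returns the stored response without re-executing the handler body:

```python
from aws_lambda_powertools.utilities.idempotency import (
    DynamoDBPersistenceLayer, idempotent
)

persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
executions = 0  # hypothetical counter, for illustration only

@idempotent(persistence_store=persistence_layer)
def handler(event, context):
    global executions
    executions += 1  # the payment side effect would happen here
    return {"payment_id": 12345, "message": "success", "statusCode": 200}

event = {"user": "xyz", "product_id": "123456789"}
first = handler(event, {})
retry = handler(event, {})  # identical payload: served from the persistence layer

assert first == retry
assert executions == 1  # the handler body ran only once
```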
- -???+ note - Bear in mind that the entire Lambda handler is treated as a single idempotent operation. If your Lambda handler can cause multiple side effects, consider splitting it into separate functions. - -### Handling exceptions - -If you are using the `idempotent` decorator on your Lambda handler, any unhandled exceptions that are raised during the code execution will cause **the record in the persistence layer to be deleted**. -This means that new invocations will execute your code again despite having the same payload. If you don't want the record to be deleted, you need to catch exceptions within the idempotent function and return a successful response. - - -![Idempotent sequence exception](../media/idempotent_sequence_exception.png) - -If you are using `idempotent_function`, any unhandled exceptions that are raised _inside_ the decorated function will cause the record in the persistence layer to be deleted, and allow the function to be executed again if retried. - -If an Exception is raised _outside_ the scope of the decorated function and after your function has been called, the persistent record will not be affected. In this case, idempotency will be maintained for your decorated function. Example: - -```python hl_lines="2-4 8-10" title="Exception not affecting idempotency record sample" -def lambda_handler(event, context): - # If an exception is raised here, no idempotent record will ever get created as the - # idempotent function does not get called - do_some_stuff() - - result = call_external_service(data={"user": "user1", "id": 5}) - - # This exception will not cause the idempotent record to be deleted, since it - # happens after the decorated function has been successfully called - raise Exception - - -@idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) -def call_external_service(data: dict, **kwargs): - result = requests.post('http://example.com', json={"user": data['user'], "transaction_id": data['id']} - return result.json() -``` - -???+ warning - **We will raise `IdempotencyPersistenceLayerError`** if any of the calls to the persistence layer fail unexpectedly. - - As this happens outside the scope of your decorated function, you are not able to catch it if you're using the `idempotent` decorator on your Lambda handler. - -### Persistence layers - -#### DynamoDBPersistenceLayer - -This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended). - -```python hl_lines="5-9" title="Customizing DynamoDBPersistenceLayer to suit your table structure" -from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer - -persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - key_attr="idempotency_key", - expiry_attr="expires_at", - status_attr="current_status", - data_attr="result_data", - validation_key_attr="validation_key", -) -``` - -When using DynamoDB as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: - -Parameter | Required | Default | Description -------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- -**table_name** | :heavy_check_mark: | | Table name to store state -**key_attr** | | `id` | Partition key of the table. 
Hashed representation of the payload (unless **sort_key_attr** is specified) -**expiry_attr** | | `expiration` | Unix timestamp of when record expires -**status_attr** | | `status` | Stores status of the lambda execution during and after invocation -**data_attr** | | `data` | Stores results of successfully executed Lambda handlers -**validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation -**sort_key_attr** | | | Sort key of the table (if table is configured with a sort key). -**static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. - -## Advanced - -### Customizing the default behavior - -Idempotent decorator can be further configured with **`IdempotencyConfig`** as seen in the previous example. These are the available options for further configuration - -Parameter | Default | Description -------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- -**event_key_jmespath** | `""` | JMESPath expression to extract the idempotency key from the event record using [built-in functions](/utilities/jmespath_functions) -**payload_validation_jmespath** | `""` | JMESPath expression to validate whether certain parameters have changed in the event while the event payload -**raise_on_no_idempotency_key** | `False` | Raise exception if no idempotency key was found in the request -**expires_after_seconds** | 3600 | The number of seconds to wait before a record is expired -**use_local_cache** | `False` | Whether to locally cache idempotency results -**local_cache_max_items** | 256 | Max number of items to store in local cache -**hash_function** | `md5` | Function to use for calculating hashes, as provided by [hashlib](https://docs.python.org/3/library/hashlib.html) in the standard library. - -### Handling concurrent executions with the same payload - -This utility will raise an **`IdempotencyAlreadyInProgressError`** exception if you receive **multiple invocations with the same payload while the first invocation hasn't completed yet**. - -???+ info - If you receive `IdempotencyAlreadyInProgressError`, you can safely retry the operation. - -This is a locking mechanism for correctness. Since we don't know the result from the first invocation yet, we can't safely allow another concurrent execution. - -### Using in-memory cache - -**By default, in-memory local caching is disabled**, since we don't know how much memory you consume per invocation compared to the maximum configured in your Lambda function. - -???+ note "Note: This in-memory cache is local to each Lambda execution environment" - This means it will be effective in cases where your function's concurrency is low in comparison to the number of "retry" invocations with the same payload, because cache might be empty. - -You can enable in-memory caching with the **`use_local_cache`** parameter: - -```python hl_lines="8 11" title="Caching idempotent transactions in-memory to prevent multiple calls to storage" -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") -config = IdempotencyConfig( - event_key_jmespath="body", - use_local_cache=True, -) - -@idempotent(config=config, persistence_store=persistence_layer) -def handler(event, context): - ... 
-``` - -When enabled, the default is to cache a maximum of 256 records in each Lambda execution environment - You can change it with the **`local_cache_max_items`** parameter. - -### Expiring idempotency records - -???+ note - By default, we expire idempotency records after **an hour** (3600 seconds). - -In most cases, it is not desirable to store the idempotency records forever. Rather, you want to guarantee that the same payload won't be executed within a period of time. - -You can change this window with the **`expires_after_seconds`** parameter: - -```python hl_lines="8 11" title="Adjusting cache TTL" -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") -config = IdempotencyConfig( - event_key_jmespath="body", - expires_after_seconds=5*60, # 5 minutes -) - -@idempotent(config=config, persistence_store=persistence_layer) -def handler(event, context): - ... -``` - -This will mark any records older than 5 minutes as expired, and the lambda handler will be executed as normal if it is invoked with a matching payload. - -???+ note "Note: DynamoDB time-to-live field" - This utility uses **`expiration`** as the TTL field in DynamoDB, as [demonstrated in the SAM example earlier](#required-resources). - -### Payload validation - -???+ question "Question: What if your function is invoked with the same payload except some outer parameters have changed?" - Example: A payment transaction for a given productID was requested twice for the same customer, **however the amount to be paid has changed in the second transaction**. - -By default, we will return the same result as it returned before, however in this instance it may be misleading; we provide a fail fast payload validation to address this edge case. - -With **`payload_validation_jmespath`**, you can provide an additional JMESPath expression to specify which part of the event body should be validated against previous idempotent invocations - -=== "app.py" - - ```python hl_lines="7 11 18 25" - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - config = IdempotencyConfig( - event_key_jmespath="[userDetail, productId]", - payload_validation_jmespath="amount" - ) - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - # Creating a subscription payment is a side - # effect of calling this function! - payment = create_subscription_payment( - user=event['userDetail']['username'], - product=event['product_id'], - amount=event['amount'] - ) - ... - return { - "message": "success", - "statusCode": 200, - "payment_id": payment.id, - "amount": payment.amount - } - ``` - -=== "Example Event 1" - - ```json hl_lines="8" - { - "userDetail": { - "username": "User1", - "user_email": "user@example.com" - }, - "productId": 1500, - "charge_type": "subscription", - "amount": 500 - } - ``` - -=== "Example Event 2" - - ```json hl_lines="8" - { - "userDetail": { - "username": "User1", - "user_email": "user@example.com" - }, - "productId": 1500, - "charge_type": "subscription", - "amount": 1 - } - ``` - -In this example, the **`userDetail`** and **`productId`** keys are used as the payload to generate the idempotency key, as per **`event_key_jmespath`** parameter. 
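A sketch of turning that validation failure into a client-facing error (assuming `IdempotencyValidationError` is importable from the utility's `exceptions` module, and wrapping an inner idempotent function so the exception can be caught):

```python
from aws_lambda_powertools.utilities.idempotency import (
    IdempotencyConfig, DynamoDBPersistenceLayer, idempotent_function
)
from aws_lambda_powertools.utilities.idempotency.exceptions import IdempotencyValidationError

config = IdempotencyConfig(
    event_key_jmespath="[userDetail, productId]",
    payload_validation_jmespath="amount",
)
persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")

@idempotent_function(data_keyword_argument="order", config=config, persistence_store=persistence_layer)
def create_payment(order: dict):
    ...  # charge the customer here

def lambda_handler(event, context):
    try:
        return create_payment(order=event)
    except IdempotencyValidationError:
        # Same idempotency key, but "amount" differs from the stored invocation
        return {"statusCode": 422, "body": "conflicting payload for idempotency key"}
```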
- -???+ note - If we try to send the same request but with a different amount, we will raise **`IdempotencyValidationError`**. - -Without payload validation, we would have returned the same result as we did for the initial request. Since we're also returning an amount in the response, this could be quite confusing for the client. - -By using **`payload_validation_jmespath="amount"`**, we prevent this potentially confusing behavior and instead raise an Exception. - -### Making idempotency key required - -If you want to enforce that an idempotency key is required, you can set **`raise_on_no_idempotency_key`** to `True`. - -This means that we will raise **`IdempotencyKeyError`** if the evaluation of **`event_key_jmespath`** is `None`. - -=== "app.py" - - ```python hl_lines="9-10 13" - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - # Requires "user"."uid" and "order_id" to be present - config = IdempotencyConfig( - event_key_jmespath="[user.uid, order_id]", - raise_on_no_idempotency_key=True, - ) - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - pass - ``` - -=== "Success Event" - - ```json hl_lines="3 6" - { - "user": { - "uid": "BB0D045C-8878-40C8-889E-38B3CB0A61B1", - "name": "Foo" - }, - "order_id": 10000 - } - ``` - -=== "Failure Event" - - Notice that `order_id` is now accidentally within `user` key - - ```json hl_lines="3 5" - { - "user": { - "uid": "DE0D000E-1234-10D1-991E-EAC1DD1D52C8", - "name": "Joe Bloggs", - "order_id": 10000 - }, - } - ``` - -### Customizing boto configuration - -The **`boto_config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when constructing the persistence store. - -=== "Custom session" - - ```python hl_lines="1 6 9 14" - import boto3 - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - boto3_session = boto3.session.Session() - persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - boto3_session=boto3_session - ) - - config = IdempotencyConfig(event_key_jmespath="body") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - ... - ``` -=== "Custom config" - - ```python hl_lines="1 7 10" - from botocore.config import Config - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - config = IdempotencyConfig(event_key_jmespath="body") - boto_config = Config() - persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - boto_config=boto_config - ) - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - ... - ``` - -### Using a DynamoDB table with a composite primary key - -When using a composite primary key table (hash+range key), use `sort_key_attr` parameter when initializing your persistence layer. - -With this setting, we will save the idempotency key in the sort key instead of the primary key. By default, the primary key will now be set to `idempotency#{LAMBDA_FUNCTION_NAME}`. 
- -You can optionally set a static value for the partition key using the `static_pk_value` parameter. - -```python hl_lines="5" title="Reusing a DynamoDB table that uses a composite primary key" -from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent - -persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - sort_key_attr='sort_key') - - -@idempotent(persistence_store=persistence_layer) -def handler(event, context): - return {"message": "success": "id": event['body']['id]} -``` - -The example function above would cause data to be stored in DynamoDB like this: - -| id | sort_key | expiration | status | data | -|------------------------------|----------------------------------|------------|-------------|-------------------------------------| -| idempotency#MyLambdaFunction | 1e956ef7da78d0cb890be999aecc0c9e | 1636549553 | COMPLETED | {"id": 12391, "message": "success"} | -| idempotency#MyLambdaFunction | 2b2cdb5f86361e97b4383087c1ffdf27 | 1636549571 | COMPLETED | {"id": 527212, "message": "success"}| -| idempotency#MyLambdaFunction | f091d2527ad1c78f05d54cc3f363be80 | 1636549585 | IN_PROGRESS | | - -### Bring your own persistent store - -This utility provides an abstract base class (ABC), so that you can implement your choice of persistent storage layer. - -You can inherit from the `BasePersistenceLayer` class and implement the abstract methods `_get_record`, `_put_record`, -`_update_record` and `_delete_record`. - -```python hl_lines="8-13 57 65 74 96 124" title="Excerpt DynamoDB Persisntence Layer implementation for reference" -import datetime -import logging -from typing import Any, Dict, Optional - -import boto3 -from botocore.config import Config - -from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer -from aws_lambda_powertools.utilities.idempotency.exceptions import ( - IdempotencyItemAlreadyExistsError, - IdempotencyItemNotFoundError, -) -from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord - -logger = logging.getLogger(__name__) - - -class DynamoDBPersistenceLayer(BasePersistenceLayer): - def __init__( - self, - table_name: str, - key_attr: str = "id", - expiry_attr: str = "expiration", - status_attr: str = "status", - data_attr: str = "data", - validation_key_attr: str = "validation", - boto_config: Optional[Config] = None, - boto3_session: Optional[boto3.session.Session] = None, - ): - boto_config = boto_config or Config() - session = boto3_session or boto3.session.Session() - self._ddb_resource = session.resource("dynamodb", config=boto_config) - self.table_name = table_name - self.table = self._ddb_resource.Table(self.table_name) - self.key_attr = key_attr - self.expiry_attr = expiry_attr - self.status_attr = status_attr - self.data_attr = data_attr - self.validation_key_attr = validation_key_attr - super(DynamoDBPersistenceLayer, self).__init__() - - def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: - """ - Translate raw item records from DynamoDB to DataRecord - - Parameters - ---------- - item: Dict[str, Union[str, int]] - Item format from dynamodb response - - Returns - ------- - DataRecord - representation of item - - """ - return DataRecord( - idempotency_key=item[self.key_attr], - status=item[self.status_attr], - expiry_timestamp=item[self.expiry_attr], - response_data=item.get(self.data_attr), - payload_hash=item.get(self.validation_key_attr), - ) - - def _get_record(self, idempotency_key) -> DataRecord: - response = 
self.table.get_item(Key={self.key_attr: idempotency_key}, ConsistentRead=True) - - try: - item = response["Item"] - except KeyError: - raise IdempotencyItemNotFoundError - return self._item_to_data_record(item) - - def _put_record(self, data_record: DataRecord) -> None: - item = { - self.key_attr: data_record.idempotency_key, - self.expiry_attr: data_record.expiry_timestamp, - self.status_attr: data_record.status, - } - - if self.payload_validation_enabled: - item[self.validation_key_attr] = data_record.payload_hash - - now = datetime.datetime.now() - try: - logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}") - self.table.put_item( - Item=item, - ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now", - ExpressionAttributeValues={":now": int(now.timestamp())}, - ) - except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: - logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") - raise IdempotencyItemAlreadyExistsError - - def _update_record(self, data_record: DataRecord): - logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - update_expression = "SET #response_data = :response_data, #expiry = :expiry, #status = :status" - expression_attr_values = { - ":expiry": data_record.expiry_timestamp, - ":response_data": data_record.response_data, - ":status": data_record.status, - } - expression_attr_names = { - "#response_data": self.data_attr, - "#expiry": self.expiry_attr, - "#status": self.status_attr, - } - - if self.payload_validation_enabled: - update_expression += ", #validation_key = :validation_key" - expression_attr_values[":validation_key"] = data_record.payload_hash - expression_attr_names["#validation_key"] = self.validation_key_attr - - kwargs = { - "Key": {self.key_attr: data_record.idempotency_key}, - "UpdateExpression": update_expression, - "ExpressionAttributeValues": expression_attr_values, - "ExpressionAttributeNames": expression_attr_names, - } - - self.table.update_item(**kwargs) - - def _delete_record(self, data_record: DataRecord) -> None: - logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") - self.table.delete_item(Key={self.key_attr: data_record.idempotency_key},) -``` - -???+ danger - Pay attention to the documentation for each - you may need to perform additional checks inside these methods to ensure the idempotency guarantees remain intact. - - For example, the `_put_record` method needs to raise an exception if a non-expired record already exists in the data store with a matching key. - -## Compatibility with other utilities - -### Validation utility - -The idempotency utility can be used with the `validator` decorator. Ensure that idempotency is the innermost decorator. - -???+ warning - If you use an envelope with the validator, the event received by the idempotency utility will be the unwrapped - event - not the "raw" event Lambda was invoked with. - - Make sure to account for this behaviour, if you set the `event_key_jmespath`. 
- -```python hl_lines="9 10" title="Using Idempotency with JSONSchema Validation utility" -from aws_lambda_powertools.utilities.validation import validator, envelopes -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -config = IdempotencyConfig(event_key_jmespath="[message, username]") -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - -@validator(envelope=envelopes.API_GATEWAY_HTTP) -@idempotent(config=config, persistence_store=persistence_layer) -def lambda_handler(event, context): - cause_some_side_effects(event['username') - return {"message": event['message'], "statusCode": 200} -``` - -???+ tip "Tip: JMESPath Powertools functions are also available" - Built-in functions known in the validation utility like `powertools_json`, `powertools_base64`, `powertools_base64_gzip` are also available to use in this utility. - - -## Testing your code - -The idempotency utility provides several routes to test your code. - -### Disabling the idempotency utility -When testing your code, you may wish to disable the idempotency logic altogether and focus on testing your business logic. To do this, you can set the environment variable `POWERTOOLS_IDEMPOTENCY_DISABLED` -with a truthy value. If you prefer setting this for specific tests, and are using Pytest, you can use [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html) fixture: - -=== "tests.py" - - ```python hl_lines="2 3" - def test_idempotent_lambda_handler(monkeypatch): - # Set POWERTOOLS_IDEMPOTENCY_DISABLED before calling decorated functions - monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", 1) - - result = handler() - ... - ``` -=== "app.py" - - ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } - ``` - -### Testing with DynamoDB Local - -To test with [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.DownloadingAndRunning.html), you can replace the `Table` resource used by the persistence layer with one you create inside your tests. This allows you to set the endpoint_url. 
- -=== "tests.py" - - ```python hl_lines="6 7 8" - import boto3 - - import app - - def test_idempotent_lambda(): - # Create our own Table resource using the endpoint for our DynamoDB Local instance - resource = boto3.resource("dynamodb", endpoint_url='http://localhost:8000') - table = resource.Table(app.persistence_layer.table_name) - app.persistence_layer.table = table - - result = app.handler({'testkey': 'testvalue'}, {}) - assert result['payment_id'] == 12345 - ``` - -=== "app.py" - - ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } - ``` - -### How do I mock all DynamoDB I/O operations - -The idempotency utility lazily creates the dynamodb [Table](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table) which it uses to access DynamoDB. -This means it is possible to pass a mocked Table resource, or stub various methods. - -=== "tests.py" - - ```python hl_lines="6 7 8 9" - from unittest.mock import MagicMock - - import app - - def test_idempotent_lambda(): - table = MagicMock() - app.persistence_layer.table = table - result = app.handler({'testkey': 'testvalue'}, {}) - table.put_item.assert_called() - ... - ``` - -=== "app.py" - - ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } - ``` - -## Extra resources - -If you're interested in a deep dive on how Amazon uses idempotency when building our APIs, check out -[this article](https://aws.amazon.com/builders-library/making-retries-safe-with-idempotent-APIs/). diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md index c6b99d1938b..5ea8ec7f2fa 100644 --- a/docs/tutorial/index.md +++ b/docs/tutorial/index.md @@ -227,75 +227,75 @@ A first attempt at the routing logic might look similar to the following code sn === "app.py" ```python hl_lines="4 9 13 27-29 35-36" - import json + import json - def hello_name(event, **kargs): - username = event["pathParameters"]["name"] - return {"statusCode": 200, "body": json.dumps({"message": f"hello {username}!"})} + def hello_name(event, **kargs): + username = event["pathParameters"]["name"] + return {"statusCode": 200, "body": json.dumps({"message": f"hello {username}!"})} - def hello(**kargs): - return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} + def hello(**kargs): + return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} - class Router: - def __init__(self): - self.routes = {} + class Router: + def __init__(self): + self.routes = {} - def set(self, path, method, handler): - self.routes[f"{path}-{method}"] = handler + def set(self, path, method, handler): + self.routes[f"{path}-{method}"] = handler - def get(self, path, method): - try: - route = self.routes[f"{path}-{method}"] - except KeyError: - raise RuntimeError(f"Cannot route request to the correct method. 
path={path}, method={method}") - return route + def get(self, path, method): + try: + route = self.routes[f"{path}-{method}"] + except KeyError: + raise RuntimeError(f"Cannot route request to the correct method. path={path}, method={method}") + return route - router = Router() - router.set(path="/hello", method="GET", handler=hello) - router.set(path="/hello/{name}", method="GET", handler=hello_name) + router = Router() + router.set(path="/hello", method="GET", handler=hello) + router.set(path="/hello/{name}", method="GET", handler=hello_name) - def lambda_handler(event, context): - path = event["resource"] - http_method = event["httpMethod"] - method = router.get(path=path, method=http_method) - return method(event=event) + def lambda_handler(event, context): + path = event["resource"] + http_method = event["httpMethod"] + method = router.get(path=path, method=http_method) + return method(event=event) ``` === "template.yaml" ```yaml hl_lines="15-24" - AWSTemplateFormatVersion: "2010-09-09" - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - HelloWorldName: - Type: Api - Properties: - Path: /hello/{name} - Method: get - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" + AWSTemplateFormatVersion: "2010-09-09" + Transform: AWS::Serverless-2016-10-31 + Description: Sample SAM Template for powertools-quickstart + Globals: + Function: + Timeout: 3 + Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get + HelloWorldName: + Type: Api + Properties: + Path: /hello/{name} + Method: get + Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" ``` Let's break this down: @@ -312,19 +312,19 @@ However, it forces us to understand the internal structure of the API Gateway re ### Simplifying with Event Handler -We can massively simplify cross-cutting concerns while keeping it lightweight by using [Event Handler](./core/event_handler/api_gateway.md){target="_blank"}. +We can massively simplify cross-cutting concerns while keeping it lightweight by using [Event Handler](../core/event_handler/api_gateway.md){target="_blank"}. ???+ tip - This is available for both [REST API (API Gateway, ALB)](./core/event_handler/api_gateway.md){target="_blank"} and [GraphQL API (AppSync)](./core/event_handler/appsync.md){target="_blank"}. + This is available for both [REST API (API Gateway, ALB)](../core/event_handler/api_gateway.md){target="_blank"} and [GraphQL API (AppSync)](../core/event_handler/appsync.md){target="_blank"}. Let's include Lambda Powertools as a dependency in `requirement.txt`, and use Event Handler to refactor our previous example. 
=== "app.py" ```python hl_lines="1 3 6 11 17" - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello/") @@ -349,13 +349,13 @@ Let's include Lambda Powertools as a dependency in `requirement.txt`, and use Ev Use `sam build && sam local start-api` and try run it locally again. ???+ note - If you're coming from [Flask](https://flask.palletsprojects.com/en/2.0.x/){target="_blank"}, you will be familiar with this experience already. [Event Handler for API Gateway](./core/event_handler/api_gateway.md){target="_blank"} uses `ApiGatewayResolver` to give a Flask-like experience while staying true to our tenet `Keep it lean`. + If you're coming from [Flask](https://flask.palletsprojects.com/en/2.0.x/){target="_blank"}, you will be familiar with this experience already. [Event Handler for API Gateway](../core/event_handler/api_gateway.md){target="_blank"} uses `APIGatewayRestResolver` to give a Flask-like experience while staying true to our tenet `Keep it lean`. We have added the route annotation as the decorator for our methods. It enables us to use the parameters passed in the request directly, and our responses are simply dictionaries. Lastly, we used `return app.resolve(event, context)` so Event Handler can resolve routes, inject the current request, handle serialization, route validation, etc. -From here, we could handle [404 routes](./core/event_handler/api_gateway.md#handling-not-found-routes){target="_blank"}, [error handling](./core/event_handler/api_gateway.md#http://127.0.0.1:8000/core/event_handler/api_gateway/#exception-handling){target="_blank"}, [access query strings, payload](./core/event_handler/api_gateway.md#http://127.0.0.1:8000/core/event_handler/api_gateway#accessing-request-details){target="_blank"}, etc. +From here, we could handle [404 routes](../core/event_handler/api_gateway.md#handling-not-found-routes){target="_blank"}, [error handling](../core/event_handler/api_gateway.md#exception-handling){target="_blank"}, [access query strings, payload](../core/event_handler/api_gateway.md#accessing-request-details){target="_blank"}, etc. ???+ tip @@ -379,7 +379,7 @@ The first option could be to use the standard Python Logger, and use a specializ import os from pythonjsonlogger import jsonlogger - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver logger = logging.getLogger("APP") logHandler = logging.StreamHandler() @@ -388,7 +388,7 @@ The first option could be to use the standard Python Logger, and use a specializ logger.addHandler(logHandler) logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello/") @@ -449,16 +449,16 @@ We could start by creating a dictionary with Lambda context information or somet ???+ question "Surely this could be easier, right?" Yes! Powertools Logger to the rescue :-) -As we already have Lambda Powertools as a dependency, we can simply import [Logger](./core/logger.md){target="_blank"}. +As we already have Lambda Powertools as a dependency, we can simply import [Logger](../core/logger.md){target="_blank"}. 
```python title="Refactoring with Lambda Powertools Logger" hl_lines="1 3 5 12 18 22" from aws_lambda_powertools import Logger -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths logger = Logger(service="APP") -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.get("/hello/") @@ -482,7 +482,7 @@ Let's break this down: * **L5**: We add Lambda Powertools Logger; the boilerplate is now done for you. By default, we set `INFO` as the logging level if `LOG_LEVEL` env var isn't set. * **L22**: We use `logger.inject_lambda_context` decorator to inject key information from Lambda context into every log. -* **L22**: We also instruct Logger to use the incoming API Gateway Request ID as a [correlation id](./core/logger.md##set_correlation_id-method) automatically. +* **L22**: We also instruct Logger to use the incoming API Gateway Request ID as a [correlation id](../core/logger.md##set_correlation_id-method) automatically. * **L22**: Since we're in dev, we also use `log_event=True` to automatically log each incoming request for debugging. This can be also set via [environment variables](./index.md#environment-variables){target="_blank"}. @@ -506,7 +506,7 @@ This is how the logs would look like now: We can now search our logs by the request ID to find a specific operation. Additionally, we can also search our logs for function name, Lambda request ID, Lambda function ARN, find out whether an operation was a cold start, etc. -From here, we could [set specific keys](./core/logger.md#append_keys-method){target="_blank"} to add additional contextual information about a given operation, [log exceptions](./core/logger.md#logging-exceptions){target="_blank"} to easily enumerate them later, [sample debug logs](./core/logger.md#sampling-debug-logs){target="_blank"}, etc. +From here, we could [set specific keys](../core/logger.md#append_keys-method){target="_blank"} to add additional contextual information about a given operation, [log exceptions](../core/logger.md#logging-exceptions){target="_blank"} to easily enumerate them later, [sample debug logs](../core/logger.md#sampling-debug-logs){target="_blank"}, etc. By having structured logs like this, we can easily search and analyse them in [CloudWatch Logs Insight](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AnalyzingLogData.html){target="_blank"}. @@ -531,7 +531,7 @@ It's a [two-step process](https://docs.aws.amazon.com/lambda/latest/dg/services- 1. Enable tracing in your Lambda function. 2. Instrument your application code. -Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/index.html){target="_blank"}, and then simplify it with [Lambda Powertools Tracer](core/tracer.md){target="_blank"} feature. +Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/index.html){target="_blank"}, and then simplify it with [Lambda Powertools Tracer](../core/tracer.md){target="_blank"} feature. 
=== "app.py" @@ -539,12 +539,12 @@ Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.a from aws_xray_sdk.core import xray_recorder from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths logger = Logger(service="APP") - app = ApiGatewayResolver() + app = APIGatewayRestResolver() @app.get("/hello/") @@ -649,12 +649,12 @@ Let's put them into action. from aws_xray_sdk.core import patch_all, xray_recorder from aws_lambda_powertools import Logger -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths logger = Logger(service="APP") -app = ApiGatewayResolver() +app = APIGatewayRestResolver() cold_start = True patch_all() @@ -719,19 +719,19 @@ If you choose any of the traces available, try opening the `handler` subsegment Cross-cutting concerns like filtering traces by Cold Start, including response as well as exceptions as tracing metadata can take a considerable amount of boilerplate. -We can simplify our previous patterns by using [Lambda Powertools Tracer](core/tracer.md){target="_blank"}; a thin wrapper on top of X-Ray SDK. +We can simplify our previous patterns by using [Lambda Powertools Tracer](../core/tracer.md){target="_blank"}; a thin wrapper on top of X-Ray SDK. ???+ note You can now safely remove `aws-xray-sdk` from `requirements.txt`; keep `aws-lambda-powertools` only. ```python title="Refactoring with Lambda Powertools Tracer" hl_lines="1 6 11 13 19 21 27" from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver +from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths logger = Logger(service="APP") tracer = Tracer(service="APP") -app = ApiGatewayResolver() +app = APIGatewayRestResolver() @app.get("/hello/") @@ -768,7 +768,7 @@ Another subtle difference is that you can now run your Lambda functions and unit Lambda Powertools optimizes for Lambda compute environment. As such, we add these and other common approaches to accelerate your development, so you don't worry about implementing every cross-cutting concern. ???+ tip - You can [opt-out some of these behaviours](./core/tracer/#advanced){target="_blank"} like disabling response capturing, explicitly patching only X modules, etc. + You can [opt-out some of these behaviours](../core/tracer/#advanced){target="_blank"} like disabling response capturing, explicitly patching only X modules, etc. Repeat the process of building, deploying, and invoking your application via the API endpoint. 
Within the [AWS X-Ray Console](https://console.aws.amazon.com/xray/home#/traces/){target="_blank"}, you should see a similar view:

@@ -806,7 +806,7 @@ Let's expand our application with custom metrics using AWS SDK to see how it wor
    import boto3

    from aws_lambda_powertools import Logger, Tracer
-    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
    from aws_lambda_powertools.logging import correlation_paths

    cold_start = True
@@ -815,14 +815,14 @@ Let's expand our application with custom metrics using AWS SDK to see how it wor
    logger = Logger(service="APP")
    tracer = Tracer(service="APP")
    metrics = boto3.client("cloudwatch")
-    app = ApiGatewayResolver()
+    app = APIGatewayRestResolver()


    @tracer.capture_method
    def add_greeting_metric(service: str = "APP"):
        function_name = os.getenv("AWS_LAMBDA_FUNCTION_NAME", "undefined")
        service_dimension = {"Name": "service", "Value": service}
-        function_dimension = {"Name": "function_name": "Value": function_name}
+        function_dimension = {"Name": "function_name", "Value": function_name}
        is_cold_start = True

        global cold_start
@@ -930,18 +930,18 @@ Within `template.yaml`, we add [CloudWatchPutMetricPolicy](https://docs.aws.amaz

### Simplifying with Metrics

-[Lambda Powertools Metrics](./core/metrics.md){target="_blank} uses [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html) to create custom metrics **asynchronously** via a native integration with Lambda.
+[Lambda Powertools Metrics](../core/metrics.md){target="_blank"} uses [Amazon CloudWatch Embedded Metric Format (EMF)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format.html) to create custom metrics **asynchronously** via a native integration with Lambda.

In general terms, EMF is a specification that expects metrics in a JSON payload within CloudWatch Logs. Lambda ingests all logs emitted by a given function into CloudWatch Logs. CloudWatch automatically looks for log entries that follow the EMF format and transforms them into a CloudWatch metric.

???+ info
    If you are interested in the details of the EMF mechanism, see this [blog post](https://aws.amazon.com/blogs/mt/enhancing-workload-observability-using-amazon-cloudwatch-embedded-metric-format/){target="_blank"}.

-Let's implement that using [Metrics](./core/metrics.md){target="_blank}:
+Let's implement that using [Metrics](../core/metrics.md){target="_blank"}:

```python title="Refactoring with Lambda Powertools Metrics" hl_lines="1 4 9 18 27 33"
from aws_lambda_powertools import Logger, Tracer, Metrics
-from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.logging import correlation_paths
from aws_lambda_powertools.metrics import MetricUnit

@@ -949,7 +949,7 @@ from aws_lambda_powertools.metrics import MetricUnit
logger = Logger(service="APP")
tracer = Tracer(service="APP")
metrics = Metrics(namespace="MyApp", service="APP")
-app = ApiGatewayResolver()
+app = APIGatewayRestResolver()


@app.get("/hello/<name>")
@@ -986,7 +986,7 @@ That's a lot less boilerplate code!
Let's break this down:

* **L9**: We initialize `Metrics` with our service name (`APP`) and metrics namespace (`MyApp`), reducing the need to add the `service` dimension for every metric and setting the namespace later
* **L18, 27**: We use `add_metric` similarly to our custom function, except we now have the `MetricUnit` enum to help us understand which Metric Units we have at our disposal
* **L33**: We use the `@metrics.log_metrics` decorator to ensure that our metrics are aligned with the EMF output and validated beforehand, such as when we forget to set a namespace, or accidentally use a metric unit as a string that doesn't exist in CloudWatch.
-* **L33**: We also use `capture_cold_start_metric=True` so we don't have to handle that logic either. Note that [Metrics](./core/metrics.md){target="_blank"} does not publish a warm invocation metric (ColdStart=0) for cost reasons. As such, treat the absence (sparse metric) as a non-cold start invocation.
+* **L33**: We also use `capture_cold_start_metric=True` so we don't have to handle that logic either. Note that [Metrics](../core/metrics.md){target="_blank"} does not publish a warm invocation metric (ColdStart=0) for cost reasons. As such, treat the absence (sparse metric) as a non-cold start invocation.

Repeat the process of building, deploying, and invoking your application via the API endpoint a few times to generate metrics - [Artillery](https://www.artillery.io/){target="_blank"} and [K6.io](https://k6.io/open-source){target="_blank"} are quick ways to generate some load.

Within the [CloudWatch Metrics view](https://console.aws.amazon.com/cloudwatch/home#metricsV2:graph=~()){target="_blank"}, you should see the `MyApp` custom namespace with your custom metrics, and `SuccessfulGreetings` available to graph.

@@ -1028,7 +1028,7 @@ If you're curious about how the EMF portion of your function logs look like, you

We covered a lot of ground here and we only scratched the surface of the feature set available within Lambda Powertools.

-When it comes to the observability features ([Tracer](./core/tracer.md){target="_blank"}, [Metrics](./core/metrics.md){target="_blank"}, [Logging](./core/logger.md){target="_blank"}), don't stop there! The goal here is to ensure you can ask arbitrary questions to assess your system's health; these features are only part of the wider story!
+When it comes to the observability features ([Tracer](../core/tracer.md){target="_blank"}, [Metrics](../core/metrics.md){target="_blank"}, [Logging](../core/logger.md){target="_blank"}), don't stop there! The goal here is to ensure you can ask arbitrary questions to assess your system's health; these features are only part of the wider story!

This requires a change in mindset to ensure operational excellence is part of the software development lifecycle.
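For reference, the EMF portion of a single log entry produced by the refactored code above would look roughly like the following sketch (timestamp and metric values are illustrative):

```json
{
    "_aws": {
        "Timestamp": 1644368400000,
        "CloudWatchMetrics": [
            {
                "Namespace": "MyApp",
                "Dimensions": [["service"]],
                "Metrics": [{"Name": "SuccessfulGreetings", "Unit": "Count"}]
            }
        ]
    },
    "service": "APP",
    "SuccessfulGreetings": [1.0]
}
```

CloudWatch extracts `SuccessfulGreetings` as a metric in the `MyApp` namespace from this entry, while the remaining keys stay searchable as structured log data.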
diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 416b768e77e..95efc5d051c 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -291,10 +291,10 @@ You can use `get_enabled_features` method for scenarios where you need a list of === "app.py" ```python hl_lines="17-20 23" - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - app = ApiGatewayResolver() + app = APIGatewayRestResolver() app_config = AppConfigStore( environment="dev", @@ -453,6 +453,8 @@ By default, we cache configuration retrieved from the Store for 5 seconds for pe You can override `max_age` parameter when instantiating the store. +=== "app.py" + ```python hl_lines="7" from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore @@ -677,11 +679,13 @@ Parameter | Default | Description **logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools Logger. -```python hl_lines="19-25" title="AppConfigStore sample" +```python hl_lines="21-27" title="AppConfigStore sample" from botocore.config import Config import jmespath +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore + boto_config = Config(read_timeout=10, retries={"total_max_attempts": 2}) # Custom JMESPath functions @@ -715,9 +719,7 @@ You can unit test your feature flags locally and independently without setting u ???+ warning This excerpt relies on `pytest` and `pytest-mock` dependencies. -```python hl_lines="9-11" title="Unit testing feature flags" -from typing import Dict, List, Optional - +```python hl_lines="7-9" title="Unit testing feature flags" from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore, RuleAction diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md index 395f24b5a76..d02a3feb73a 100644 --- a/docs/utilities/parameters.md +++ b/docs/utilities/parameters.md @@ -515,3 +515,76 @@ The **`config`** and **`boto3_session`** parameters enable you to pass in a cust value = ssm_provider.get("/my/parameter") ... ``` + +## Testing your code + +For unit testing your applications, you can mock the calls to the parameters utility to avoid calling AWS APIs. 
This +can be achieved in a number of ways - in this example, we use the [pytest monkeypatch fixture](https://docs.pytest.org/en/latest/how-to/monkeypatch.html) +to patch the `parameters.get_parameter` method: + +=== "tests.py" + ```python + from src import index + + def test_handler(monkeypatch): + + def mockreturn(name): + return "mock_value" + + monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) + return_val = index.handler({}, {}) + assert return_val.get('message') == 'mock_value' + ``` + +=== "src/index.py" + ```python + from aws_lambda_powertools.utilities import parameters + + def handler(event, context): + # Retrieve a single parameter + value = parameters.get_parameter("my-parameter-name") + return {"message": value} + ``` + +If we need to use this pattern across multiple tests, we can avoid repetition by refactoring to use our own pytest fixture: + +=== "tests.py" + ```python + import pytest + + from src import index + + @pytest.fixture + def mock_parameter_response(monkeypatch): + def mockreturn(name): + return "mock_value" + + monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) + + # Pass our fixture as an argument to all tests where we want to mock the get_parameter response + def test_handler(mock_parameter_response): + return_val = index.handler({}, {}) + assert return_val.get('message') == 'mock_value' + + ``` + +Alternatively, if we need more fully featured mocking (for example checking the arguments passed to `get_parameter`), we +can use [unittest.mock](https://docs.python.org/3/library/unittest.mock.html) from the python stdlib instead of pytest's `monkeypatch` fixture. In this example, we use the +[patch](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch) decorator to replace the `aws_lambda_powertools.utilities.parameters.get_parameter` function with a [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock) +object named `get_parameter_mock`. + +=== "tests.py" + ```python + from unittest.mock import patch + from src import index + + # Replaces "aws_lambda_powertools.utilities.parameters.get_parameter" with a Mock object + @patch("aws_lambda_powertools.utilities.parameters.get_parameter") + def test_handler(get_parameter_mock): + get_parameter_mock.return_value = 'mock_value' + + return_val = index.handler({}, {}) + get_parameter_mock.assert_called_with("my-parameter-name") + assert return_val.get('message') == 'mock_value' + + ``` diff --git a/mkdocs.yml b/mkdocs.yml index 7f8366675ea..8c4416b8c64 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -52,6 +52,8 @@ theme: - navigation.top - navigation.instant - navigation.indexes + - navigation.tracking + - content.code.annotate icon: repo: fontawesome/brands/github logo: media/aws-logo-light.svg @@ -60,10 +62,10 @@ theme: markdown_extensions: - admonition - - pymdownx.tabbed + - pymdownx.tabbed: + alternate_style: true - pymdownx.highlight: linenums: true - - pymdownx.superfences - pymdownx.details - pymdownx.snippets: base_path: "." 
@@ -75,7 +77,7 @@ markdown_extensions: - attr_list - pymdownx.emoji - pymdownx.inlinehilite - - attr_list + - pymdownx.superfences copyright: Copyright © 2021 Amazon Web Services @@ -92,3 +94,4 @@ extra_javascript: extra: version: provider: mike + default: latest diff --git a/mypy.ini b/mypy.ini index faf6014a54d..3061cc4a2d9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -31,3 +31,6 @@ ignore_missing_imports = True [mypy-aws_xray_sdk.ext.aiohttp.client] ignore_missing_imports = True + +[mypy-dataclasses] +ignore_missing_imports = True \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 1ed434d9afa..7b8ff0cb9b4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -243,7 +243,7 @@ toml = "*" [[package]] name = "flake8-bugbear" -version = "21.11.29" +version = "22.1.11" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -573,11 +573,11 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.3.6" +version = "8.1.9" description = "A Material Design theme for MkDocs" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] jinja2 = ">=2.11.1" @@ -729,7 +729,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.10.0" +version = "2.11.2" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -1056,7 +1056,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "c1df73741840dc9ee5e8dbbf6bfa02e09d3c84f53318e3e36cba9b30f6f2d2e4" +content-hash = "76f5787a913202119d952999ce182e225ea7c4fc5e977778a9bc77ac16822bb7" [metadata.files] atomicwrites = [ @@ -1180,8 +1180,8 @@ flake8-black = [ {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-21.11.29.tar.gz", hash = "sha256:8b04cb2fafc6a78e1a9d873bd3988e4282f7959bb6b0d7c1ae648ec09b937a7b"}, - {file = "flake8_bugbear-21.11.29-py36.py37.py38-none-any.whl", hash = "sha256:179e41ddae5de5e3c20d1f61736feeb234e70958fbb56ab3c28a67739c8e9a82"}, + {file = "flake8-bugbear-22.1.11.tar.gz", hash = "sha256:4c2a4136bd4ecb8bf02d5159af302ffc067642784c9d0488b33ce4610da825ee"}, + {file = "flake8_bugbear-22.1.11-py3-none-any.whl", hash = "sha256:ce7ae44aaaf67ef192b8a6de94a5ac617144e1675ad0654fdea556f48dc18d9b"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, @@ -1262,28 +1262,12 @@ markdown = [ {file = "Markdown-3.3.5.tar.gz", hash = "sha256:26e9546bfbcde5fcd072bd8f612c9c1b6e2677cb8aadbdf65206674f46dde069"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1292,27 +1276,14 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1322,12 +1293,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1353,8 +1318,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.3.6.tar.gz", hash = "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217"}, - {file = "mkdocs_material-7.3.6-py2.py3-none-any.whl", hash = "sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75"}, + {file = "mkdocs-material-8.1.9.tar.gz", hash = "sha256:a15873a5e116bf4615af4fcedc85a0537492464365286cba50310d96fb066958"}, + {file = "mkdocs_material-8.1.9-py2.py3-none-any.whl", hash = "sha256:6feb433f29227b862418bd1009edeec2e52870770c476bf02840fc094b8823f2"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, @@ -1399,7 +1364,6 @@ pbr = [ {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, ] pdoc3 = [ - {file = "pdoc3-0.10.0-py3-none-any.whl", hash = 
"sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, ] platformdirs = [ @@ -1460,8 +1424,8 @@ pyflakes = [ {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ - {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, - {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pymdown-extensions = [ {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, diff --git a/pyproject.toml b/pyproject.toml index 2efaf62f8b1..82fb230f835 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.24.2" +version = "1.25.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] @@ -49,11 +49,11 @@ bandit = "^1.7.1" radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.2.0" -flake8-bugbear = "^21.11.29" -mkdocs-material = "^7.3.6" +flake8-bugbear = "^22.1.11" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" mypy = "^0.931" +mkdocs-material = "^8.1.9" [tool.poetry.extras] diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 76ecbc7cdd7..c21cd352961 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -12,7 +12,10 @@ from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.api_gateway import ( + ALBResolver, + APIGatewayHttpResolver, ApiGatewayResolver, + APIGatewayRestResolver, CORSConfig, ProxyEventType, Response, @@ -32,6 +35,12 @@ from tests.functional.utils import load_event +@pytest.fixture +def json_dump(): + # our serializers reduce length to save on costs; fixture to replicate separators + return lambda obj: json.dumps(obj, separators=(",", ":")) + + def read_media(file_name: str) -> bytes: path = Path(str(Path(__file__).parent.parent.parent.parent) + "/docs/media/" + file_name) return path.read_bytes() @@ -41,13 +50,14 @@ def read_media(file_name: str) -> bytes: def test_alb_event(): - # GIVEN a Application Load Balancer proxy type event - app = ApiGatewayResolver(proxy_type=ProxyEventType.ALBEvent) + # GIVEN an Application Load Balancer proxy type event + app = ALBResolver() @app.get("/lambda") def foo(): assert isinstance(app.current_event, ALBEvent) assert app.lambda_context == {} + assert app.current_event.request_context.elb_target_group_arn is not None return Response(200, content_types.TEXT_HTML, "foo") # WHEN calling the event handler @@ -62,12 +72,13 @@ def foo(): def test_api_gateway_v1(): # GIVEN a Http API V1 proxy type event - app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent) + app = APIGatewayRestResolver() 
@app.get("/my/path") def get_lambda() -> Response: assert isinstance(app.current_event, APIGatewayProxyEvent) assert app.lambda_context == {} + assert app.current_event.request_context.domain_name == "id.execute-api.us-east-1.amazonaws.com" return Response(200, content_types.APPLICATION_JSON, json.dumps({"foo": "value"})) # WHEN calling the event handler @@ -100,12 +111,13 @@ def get_lambda() -> Response: def test_api_gateway_v2(): # GIVEN a Http API V2 proxy type event - app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEventV2) + app = APIGatewayHttpResolver() @app.post("/my/path") def my_path() -> Response: assert isinstance(app.current_event, APIGatewayProxyEventV2) post_data = app.current_event.json_body + assert app.current_event.cookies[0] == "cookie1" return Response(200, content_types.TEXT_PLAIN, post_data["username"]) # WHEN calling the event handler @@ -506,13 +518,10 @@ def custom_method(): assert headers["Access-Control-Allow-Methods"] == "CUSTOM" -def test_service_error_responses(): +def test_service_error_responses(json_dump): # SCENARIO handling different kind of service errors being raised app = ApiGatewayResolver(cors=CORSConfig()) - def json_dump(obj): - return json.dumps(obj, separators=(",", ":")) - # GIVEN an BadRequestError @app.get(rule="/bad-request-error", cors=False) def bad_request_error(): @@ -641,7 +650,7 @@ def test_debug_mode_environment_variable(monkeypatch): assert app._debug -def test_debug_json_formatting(): +def test_debug_json_formatting(json_dump): # GIVEN debug is True app = ApiGatewayResolver(debug=True) response = {"message": "Foo"} @@ -654,7 +663,7 @@ def foo(): result = app({"path": "/foo", "httpMethod": "GET"}, None) # THEN return a pretty print json in the body - assert result["body"] == json.dumps(response, indent=4) + assert result["body"] == json_dump(response) def test_debug_print_event(capsys): diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 0732f1d58b1..5b76cda0475 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -648,6 +648,29 @@ def test_data_record_invalid_status_value(): assert e.value.args[0] == "UNSUPPORTED_STATUS" +def test_data_record_json_to_dict_mapping(): + # GIVEN a data record with status "INPROGRESS" and provided response data + data_record = DataRecord( + "key", status="INPROGRESS", response_data='{"body": "execution finished","statusCode": "200"}' + ) + + # WHEN translating response data to dictionary + response_data = data_record.response_json_as_dict() + + # THEN return dictionary + assert isinstance(response_data, dict) + + +def test_data_record_json_to_dict_mapping_when_response_data_none(): + # GIVEN a data record with status "INPROGRESS" and not set response data + data_record = DataRecord("key", status="INPROGRESS", response_data=None) + # WHEN translating response data to dictionary + response_data = data_record.response_json_as_dict() + + # THEN return null value + assert response_data is None + + @pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True) def test_in_progress_never_saved_to_cache( idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index 6b05119b88b..20b0a74fc64 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -17,6 +17,7 @@ from 
aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter from aws_lambda_powertools.logging.logger import set_package_logger from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.utilities.data_classes import S3Event, event_source @pytest.fixture @@ -635,3 +636,21 @@ def test_use_datetime(stdout, service_name, utc): assert re.fullmatch( f"custom timestamp: milliseconds=[0-9]+ microseconds=[0-9]+ timezone={re.escape(expected_tz)}", log["timestamp"] ) + + +def test_inject_lambda_context_log_event_request_data_classes(lambda_context, stdout, lambda_event, service_name): + # GIVEN Logger is initialized + logger = Logger(service=service_name, stream=stdout) + + # WHEN a lambda function is decorated with logger instructed to log event + # AND the event is an event source data class + @event_source(data_class=S3Event) + @logger.inject_lambda_context(log_event=True) + def handler(event, context): + logger.info("Hello") + + handler(lambda_event, lambda_context) + + # THEN logger should log event received from Lambda + logged_event, _ = capture_multiple_logging_statements_output(stdout) + assert logged_event["message"] == lambda_event diff --git a/tests/functional/test_logger_utils.py b/tests/functional/test_logger_utils.py index 1317fefc6ab..6d048b22dd6 100644 --- a/tests/functional/test_logger_utils.py +++ b/tests/functional/test_logger_utils.py @@ -30,9 +30,8 @@ class LogLevel(Enum): @pytest.fixture def logger(stdout, log_level): def _logger(): - logging.basicConfig(stream=stdout, level=log_level.NOTSET.value) - logger = logging.getLogger(name=service_name()) - return logger + logging.basicConfig(stream=stdout, level=log_level.INFO.value) + return logging.getLogger(name=service_name()) return _logger @@ -51,143 +50,146 @@ def service_name(): def test_copy_config_to_ext_loggers(stdout, logger, log_level): - - msg = "test message" - - # GIVEN a external logger and powertools logger initialized + # GIVEN two external loggers and powertools logger initialized logger_1 = logger() logger_2 = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to ALL external loggers + # AND external loggers used utils.copy_config_to_registered_loggers(source_logger=powertools_logger) + msg = "test message1" logger_1.info(msg) logger_2.info(msg) logs = capture_multiple_logging_statements_output(stdout) - # THEN + # THEN all external loggers used Powertools handler, formatter and log level for index, logger in enumerate([logger_1, logger_2]): assert len(logger.handlers) == 1 - assert type(logger.handlers[0]) is logging.StreamHandler - assert type(logger.handlers[0].formatter) is formatter.LambdaPowertoolsFormatter + assert isinstance(logger.handlers[0], logging.StreamHandler) + assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter) assert logger.level == log_level.INFO.value assert logs[index]["message"] == msg assert logs[index]["level"] == log_level.INFO.name def test_copy_config_to_ext_loggers_include(stdout, logger, log_level): - - msg = "test message" - - # GIVEN a external logger and powertools logger initialized + # GIVEN an external logger and powertools logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to 
ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to INCLUDED external loggers + # AND our external logger used utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger.name}) + msg = "test message2" logger.info(msg) log = capture_logging_output(stdout) - # THEN + # THEN included external loggers used Powertools handler, formatter and log level. assert len(logger.handlers) == 1 - assert type(logger.handlers[0]) is logging.StreamHandler - assert type(logger.handlers[0].formatter) is formatter.LambdaPowertoolsFormatter + assert isinstance(logger.handlers[0], logging.StreamHandler) + assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter) assert logger.level == log_level.INFO.value assert log["message"] == msg assert log["level"] == log_level.INFO.name def test_copy_config_to_ext_loggers_wrong_include(stdout, logger, log_level): - - # GIVEN a external logger and powertools logger initialized + # GIVEN an external logger and powertools logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to INCLUDED NON EXISTING external loggers utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={"non-existing-logger"}) - # THEN + # THEN existing external logger is not modified assert not logger.handlers def test_copy_config_to_ext_loggers_exclude(stdout, logger, log_level): - - # GIVEN a external logger and powertools logger initialized + # GIVEN an external logger and powertools logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to ALL BUT external logger utils.copy_config_to_registered_loggers(source_logger=powertools_logger, exclude={logger.name}) - # THEN + # THEN external logger is not modified assert not logger.handlers def test_copy_config_to_ext_loggers_include_exclude(stdout, logger, log_level): - - msg = "test message" - - # GIVEN a external logger and powertools logger initialized + # GIVEN two external loggers and powertools logger initialized logger_1 = logger() logger_2 = logger() powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to INCLUDED external loggers + # AND external logger_1 is also in EXCLUDE list utils.copy_config_to_registered_loggers( source_logger=powertools_logger, include={logger_1.name, logger_2.name}, exclude={logger_1.name} ) + msg = "test message3" logger_2.info(msg) log = capture_logging_output(stdout) - # THEN + # THEN logger_1 is not modified and Logger_2 used Powertools handler, formatter and log level assert not logger_1.handlers assert len(logger_2.handlers) == 1 - assert type(logger_2.handlers[0]) is logging.StreamHandler - assert type(logger_2.handlers[0].formatter) is formatter.LambdaPowertoolsFormatter + assert isinstance(logger_2.handlers[0], logging.StreamHandler) + assert isinstance(logger_2.handlers[0].formatter, formatter.LambdaPowertoolsFormatter) 
assert logger_2.level == log_level.INFO.value assert log["message"] == msg assert log["level"] == log_level.INFO.name def test_copy_config_to_ext_loggers_clean_old_handlers(stdout, logger, log_level): - - # GIVEN a external logger with handler and powertools logger initialized + # GIVEN an external logger with handler and powertools logger initialized logger = logger() - handler = logging.FileHandler("logfile") + handler = logging.NullHandler() logger.addHandler(handler) powertools_logger = Logger(service=service_name(), level=log_level.INFO.value, stream=stdout) - # WHEN configuration copied from powertools logger to ALL external loggers AND our external logger used + # WHEN configuration copied from powertools logger to ALL external loggers utils.copy_config_to_registered_loggers(source_logger=powertools_logger) - # THEN + # THEN old logger's handler removed and Powertools configuration used instead assert len(logger.handlers) == 1 - assert type(logger.handlers[0]) is logging.StreamHandler - assert type(logger.handlers[0].formatter) is formatter.LambdaPowertoolsFormatter + assert isinstance(logger.handlers[0], logging.StreamHandler) + assert isinstance(logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter) def test_copy_config_to_ext_loggers_custom_log_level(stdout, logger, log_level): - - msg = "test message" - - # GIVEN a external logger and powertools logger initialized + # GIVEN an external logger and powertools logger initialized logger = logger() powertools_logger = Logger(service=service_name(), level=log_level.CRITICAL.value, stream=stdout) level = log_level.WARNING.name - # WHEN configuration copied from powertools logger to ALL external loggers - # AND our external logger used with custom log_level + # WHEN configuration copied from powertools logger to INCLUDED external logger + # AND external logger used with custom log_level utils.copy_config_to_registered_loggers(source_logger=powertools_logger, include={logger.name}, log_level=level) + msg = "test message4" logger.warning(msg) log = capture_logging_output(stdout) - # THEN + # THEN external logger used Powertools handler, formatter and CUSTOM log level. 
+        warnings.simplefilter("default")
         lambda_handler({}, {})
         assert len(w) == 1
         assert str(w[-1].message) == "No metrics to publish, skipping"
diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py
index d32a044279b..2d9e6bab612 100644
--- a/tests/functional/test_utilities_batch.py
+++ b/tests/functional/test_utilities_batch.py
@@ -1,4 +1,5 @@
 import json
+import math
 from random import randint
 from typing import Callable, Dict, Optional
 from unittest.mock import patch
@@ -166,20 +167,28 @@ def factory(item: Dict) -> str:
     return factory


-def test_partial_sqs_processor_context_with_failure(sqs_event_factory, record_handler, partial_processor):
+@pytest.mark.parametrize(
+    "success_messages_count",
+    [1, 18, 34],
+)
+def test_partial_sqs_processor_context_with_failure(
+    success_messages_count, sqs_event_factory, record_handler, partial_processor
+):
     """
-    Test processor with one failing record
+    Test processor with one failing record and multiple processed records
     """
     fail_record = sqs_event_factory("fail")
-    success_record = sqs_event_factory("success")
+    success_records = [sqs_event_factory("success") for _ in range(success_messages_count)]

-    records = [fail_record, success_record]
+    records = [fail_record, *success_records]

     response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []}

     with Stubber(partial_processor.client) as stubber:
-        stubber.add_response("delete_message_batch", response)
-
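+        # the processor deletes successful messages in chunks of max_message_batch
+        # (SQS DeleteMessageBatch accepts at most 10 entries), so stub one response per expected call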
+        for _ in range(math.ceil(success_messages_count / partial_processor.max_message_batch)):
+            stubber.add_response("delete_message_batch", response)
         with pytest.raises(SQSBatchProcessingError) as error:
             with partial_processor(records, record_handler) as ctx:
                 ctx.process()
@@ -188,6 +195,27 @@ def test_partial_sqs_processor_context_with_failure(sqs_event_factory, record_ha
         stubber.assert_no_pending_responses()


+def test_partial_sqs_processor_context_with_failure_exception(sqs_event_factory, record_handler, partial_processor):
+    """
+    Test processor re-raising a client error from the delete call
+    """
+    fail_record = sqs_event_factory("fail")
+    success_record = sqs_event_factory("success")
+
+    records = [fail_record, success_record]
+
+    with Stubber(partial_processor.client) as stubber:
+        stubber.add_client_error(
+            method="delete_message_batch", service_error_code="ServiceUnavailable", http_status_code=503
+        )
+        with pytest.raises(Exception) as error:
+            with partial_processor(records, record_handler) as ctx:
+                ctx.process()
+
+        assert "ServiceUnavailable" in str(error.value)
+        stubber.assert_no_pending_responses()
+
+
 def test_partial_sqs_processor_context_only_success(sqs_event_factory, record_handler, partial_processor):
     """
     Test processor without failure
diff --git a/tests/unit/test_utilities_batch.py b/tests/unit/test_utilities_batch.py
index c491f0829cb..57de0223404 100644
--- a/tests/unit/test_utilities_batch.py
+++ b/tests/unit/test_utilities_batch.py
@@ -128,12 +128,12 @@ def test_partial_sqs_clean(monkeypatch, mocker, partial_sqs_processor):
     entries_to_clean_mock = mocker.patch.object(PartialSQSProcessor, "_get_entries_to_clean")

     queue_url_mock.return_value = mocker.sentinel.queue_url
-    entries_to_clean_mock.return_value = mocker.sentinel.entries_to_clean
+    entries_to_clean_mock.return_value = [mocker.sentinel.entries_to_clean]

     client_mock = mocker.patch.object(partial_sqs_processor, "client", autospec=True)

     with pytest.raises(SQSBatchProcessingError):
         partial_sqs_processor._clean()

     client_mock.delete_message_batch.assert_called_once_with(
-        QueueUrl=mocker.sentinel.queue_url, Entries=mocker.sentinel.entries_to_clean
+        QueueUrl=mocker.sentinel.queue_url, Entries=[mocker.sentinel.entries_to_clean]
     )