diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 489a5ccccc2..246992ec244 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -79,13 +79,20 @@ jobs: run: | make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" poetry run mike set-default --push latest - - name: Deploy all docs + - name: Release API docs to release version uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./api keep_files: true destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api + - name: Release API docs to latest + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api sync_master: needs: release diff --git a/CHANGELOG.md b/CHANGELOG.md index bfa5085a13d..1d25fc22a0a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,24 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo ## [Unreleased] +## [1.15.0] - 2021-05-06 + +### Added + +* **Event handlers**: New API Gateway and ALB utility to reduce routing boilerplate and more +* **Documentation**: Logger enhancements such as bring your own formatter, handler, UTC support, and testing for Python 3.6 +* **Parser**: Support for API Gateway REST Proxy event and envelope +* **Logger**: Support for bringing custom formatter, custom handler, custom JSON serializer and deserializer, UTC support, expose `LambdaPowertoolsFormatter` +* **Metrics**: Support for persisting default dimensions that should always be added + +### Fixed + +* **Documentation**: Fix highlights, Parser types +* **Validator**: Fix event type annotations for `validate` standalone function +* **Parser**: Improve and fix types +* **Internal**: Remove X-Ray SDK version pinning as serialization regression has been fixed in 2.8.0 +* **Internal**: Latest documentation correctly includes a copy of API docs reference + ## [1.14.0] - 2021-04-09 ### Added diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py new file mode 100644 index 00000000000..2b1e1fc0900 --- /dev/null +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -0,0 +1,498 @@ +import base64 +import json +import logging +import re +import zlib +from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Set, Union + +from aws_lambda_powertools.shared.json_encoder import Encoder +from aws_lambda_powertools.utilities.data_classes import ALBEvent, APIGatewayProxyEvent, APIGatewayProxyEventV2 +from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = logging.getLogger(__name__) + + +class ProxyEventType(Enum): + """An enumeration of the supported proxy event types.""" + + APIGatewayProxyEvent = "APIGatewayProxyEvent" + APIGatewayProxyEventV2 = "APIGatewayProxyEventV2" + ALBEvent = "ALBEvent" + + +class CORSConfig(object): + """CORS Config + + Examples + -------- + + Simple CORS example using the default permissive CORS; note: this should only be used during early prototyping + + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + app = ApiGatewayResolver() + + @app.get("/my/path", cors=True) + def with_cors(): + return {"message": "Foo"} + + Using a custom CORSConfig, where `with_cors` uses the custom-provided CORSConfig and `without_cors` does not include any CORS headers.
+ + from aws_lambda_powertools.event_handler.api_gateway import ( + ApiGatewayResolver, CORSConfig + ) + + cors_config = CORSConfig( + allow_origin="https://www.example.com/", + expose_headers=["x-exposed-response-header"], + allow_headers=["x-custom-request-header"], + max_age=100, + allow_credentials=True, + ) + app = ApiGatewayResolver(cors=cors_config) + + @app.get("/my/path") + def with_cors(): + return {"message": "Foo"} + + @app.get("/another-one", cors=False) + def without_cors(): + return {"message": "Foo"} + """ + + _REQUIRED_HEADERS = ["Authorization", "Content-Type", "X-Amz-Date", "X-Api-Key", "X-Amz-Security-Token"] + + def __init__( + self, + allow_origin: str = "*", + allow_headers: Optional[List[str]] = None, + expose_headers: Optional[List[str]] = None, + max_age: Optional[int] = None, + allow_credentials: bool = False, + ): + """ + Parameters + ---------- + allow_origin: str + The value of the `Access-Control-Allow-Origin` header to send in the response. Defaults to "*", but should + only be used during development. + allow_headers: Optional[List[str]] + The list of additional allowed headers. This list is added to the list of + built-in allowed headers: `Authorization`, `Content-Type`, `X-Amz-Date`, + `X-Api-Key`, `X-Amz-Security-Token`. + expose_headers: Optional[List[str]] + A list of values to return in the `Access-Control-Expose-Headers` header + max_age: Optional[int] + The value for the `Access-Control-Max-Age` header + allow_credentials: bool + A boolean value that sets the value of `Access-Control-Allow-Credentials` + """ + self.allow_origin = allow_origin + self.allow_headers = set(self._REQUIRED_HEADERS + (allow_headers or [])) + self.expose_headers = expose_headers or [] + self.max_age = max_age + self.allow_credentials = allow_credentials + + def to_dict(self) -> Dict[str, str]: + """Builds the configured Access-Control HTTP headers""" + headers = { + "Access-Control-Allow-Origin": self.allow_origin, + "Access-Control-Allow-Headers": ",".join(sorted(self.allow_headers)), + } + if self.expose_headers: + headers["Access-Control-Expose-Headers"] = ",".join(self.expose_headers) + if self.max_age is not None: + headers["Access-Control-Max-Age"] = str(self.max_age) + if self.allow_credentials is True: + headers["Access-Control-Allow-Credentials"] = "true" + return headers + + +class Response: + """Response data class that provides greater control over what is returned from the proxy event""" + + def __init__( + self, status_code: int, content_type: Optional[str], body: Union[str, bytes, None], headers: Dict = None + ): + """ + + Parameters + ---------- + status_code: int + HTTP status code, e.g. 200 + content_type: str + Optionally set the Content-Type header, e.g. "application/json". Note: this will be merged into any + provided HTTP headers + body: Union[str, bytes, None] + Optionally set the response body. Note: a bytes body will be automatically base64 encoded + headers: dict + Optionally set specific HTTP headers. Setting "Content-Type" here would override the `content_type` value.
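+ + Example + ------- + **Returning a plain-text response - a minimal sketch; values are purely illustrative** + + Response(status_code=200, content_type="text/plain", body="Hello world")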
+ """ + self.status_code = status_code + self.body = body + self.base64_encoded = False + self.headers: Dict = headers or {} + if content_type: + self.headers.setdefault("Content-Type", content_type) + + +class Route: + """Internally used Route Configuration""" + + def __init__( + self, method: str, rule: Any, func: Callable, cors: bool, compress: bool, cache_control: Optional[str] + ): + self.method = method.upper() + self.rule = rule + self.func = func + self.cors = cors + self.compress = compress + self.cache_control = cache_control + + +class ResponseBuilder: + """Internally used Response builder""" + + def __init__(self, response: Response, route: Route = None): + self.response = response + self.route = route + + def _add_cors(self, cors: CORSConfig): + """Update headers to include the configured Access-Control headers""" + self.response.headers.update(cors.to_dict()) + + def _add_cache_control(self, cache_control: str): + """Set the specified cache control headers for 200 http responses. For non-200 `no-cache` is used.""" + self.response.headers["Cache-Control"] = cache_control if self.response.status_code == 200 else "no-cache" + + def _compress(self): + """Compress the response body, but only if `Accept-Encoding` headers includes gzip.""" + self.response.headers["Content-Encoding"] = "gzip" + if isinstance(self.response.body, str): + logger.debug("Converting string response to bytes before compressing it") + self.response.body = bytes(self.response.body, "utf-8") + gzip = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS | 16) + self.response.body = gzip.compress(self.response.body) + gzip.flush() + + def _route(self, event: BaseProxyEvent, cors: Optional[CORSConfig]): + """Optionally handle any of the route's configure response handling""" + if self.route is None: + return + if self.route.cors: + self._add_cors(cors or CORSConfig()) + if self.route.cache_control: + self._add_cache_control(self.route.cache_control) + if self.route.compress and "gzip" in (event.get_header_value("accept-encoding", "") or ""): + self._compress() + + def build(self, event: BaseProxyEvent, cors: CORSConfig = None) -> Dict[str, Any]: + """Build the full response dict to be returned by the lambda""" + self._route(event, cors) + + if isinstance(self.response.body, bytes): + logger.debug("Encoding bytes response with base64") + self.response.base64_encoded = True + self.response.body = base64.b64encode(self.response.body).decode() + return { + "statusCode": self.response.status_code, + "headers": self.response.headers, + "body": self.response.body, + "isBase64Encoded": self.response.base64_encoded, + } + + +class ApiGatewayResolver: + """API Gateway and ALB proxy resolver + + Examples + -------- + Simple example with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.get("/get-call") + def simple_get(): + return {"message": "Foo"} + + @app.post("/post-call") + def simple_post(): + post_data: dict = app.current_event.json_body + return {"message": post_data["value"]} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + + current_event: BaseProxyEvent + lambda_context: LambdaContext + + def __init__(self, proxy_type: Enum = ProxyEventType.APIGatewayProxyEvent, cors: CORSConfig = None): + """ + Parameters + ---------- + 
proxy_type: ProxyEventType + Proxy request type, defaults to API Gateway V1 + cors: CORSConfig + Optionally configure and enable CORS. When enabled here, individual routes do not need to set cors=True + """ + self._proxy_type = proxy_type + self._routes: List[Route] = [] + self._cors = cors + self._cors_enabled: bool = cors is not None + self._cors_methods: Set[str] = {"OPTIONS"} + + def get(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Get route decorator with GET `method` + + Examples + -------- + Simple example with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.get("/get-call") + def simple_get(): + return {"message": "Foo"} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + return self.route(rule, "GET", cors, compress, cache_control) + + def post(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Post route decorator with POST `method` + + Examples + -------- + Simple example with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.post("/post-call") + def simple_post(): + post_data: dict = app.current_event.json_body + return {"message": post_data["value"]} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + return self.route(rule, "POST", cors, compress, cache_control) + + def put(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Put route decorator with PUT `method` + + Examples + -------- + Simple example with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.put("/put-call") + def simple_put(): + put_data: dict = app.current_event.json_body + return {"message": put_data["value"]} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + return self.route(rule, "PUT", cors, compress, cache_control) + + def delete(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Delete route decorator with DELETE `method` + + Examples + -------- + Simple example with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.delete("/delete-call") + def simple_delete(): + return {"message": "deleted"} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + return self.route(rule, "DELETE", cors, compress, cache_control) + + def patch(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Patch route decorator with PATCH `method` + + Examples + -------- + Simple example
with a custom lambda handler using the Tracer capture_lambda_handler decorator + + ```python + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + tracer = Tracer() + app = ApiGatewayResolver() + + @app.patch("/patch-call") + def simple_patch(): + patch_data: dict = app.current_event.json_body + patch_data["value"] = "patched" + + return {"message": patch_data} + + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + """ + return self.route(rule, "PATCH", cors, compress, cache_control) + + def route(self, rule: str, method: str, cors: bool = None, compress: bool = False, cache_control: str = None): + """Route decorator includes parameter `method`""" + + def register_resolver(func: Callable): + logger.debug(f"Adding route using rule {rule} and method {method.upper()}") + if cors is None: + cors_enabled = self._cors_enabled + else: + cors_enabled = cors + self._routes.append(Route(method, self._compile_regex(rule), func, cors_enabled, compress, cache_control)) + if cors_enabled: + logger.debug(f"Registering method {method.upper()} to Allow Methods in CORS") + self._cors_methods.add(method.upper()) + return func + + return register_resolver + + def resolve(self, event, context) -> Dict[str, Any]: + """Resolves the response based on the provided event and decorator routes + + Parameters + ---------- + event: Dict[str, Any] + Event + context: LambdaContext + Lambda context + Returns + ------- + dict + Returns the dict response + """ + self.current_event = self._to_proxy_event(event) + self.lambda_context = context + return self._resolve().build(self.current_event, self._cors) + + def __call__(self, event, context) -> Any: + return self.resolve(event, context) + + @staticmethod + def _compile_regex(rule: str): + """Precompile regex pattern""" + rule_regex: str = re.sub(r"(<\w+>)", r"(?P\1.+)", rule) + return re.compile("^{}$".format(rule_regex)) + + def _to_proxy_event(self, event: Dict) -> BaseProxyEvent: + """Convert the event dict to the corresponding data class""" + if self._proxy_type == ProxyEventType.APIGatewayProxyEvent: + logger.debug("Converting event to API Gateway REST API contract") + return APIGatewayProxyEvent(event) + if self._proxy_type == ProxyEventType.APIGatewayProxyEventV2: + logger.debug("Converting event to API Gateway HTTP API contract") + return APIGatewayProxyEventV2(event) + logger.debug("Converting event to ALB contract") + return ALBEvent(event) + + def _resolve(self) -> ResponseBuilder: + """Resolves the response or returns the not-found response""" + method = self.current_event.http_method.upper() + path = self.current_event.path + for route in self._routes: + if method != route.method: + continue + match: Optional[re.Match] = route.rule.match(path) + if match: + logger.debug("Found a registered route. Calling function") + return self._call_route(route, match.groupdict()) + + logger.debug(f"No match found for path {path} and method {method}") + return self._not_found(method) + + def _not_found(self, method: str) -> ResponseBuilder: + """Called when no matching route was found and includes support for the CORS preflight response""" + headers = {} + if self._cors: + logger.debug("CORS is enabled, updating headers.") + headers.update(self._cors.to_dict()) + + if method == "OPTIONS": + logger.debug("Pre-flight request detected.
Returning CORS with null response") + headers["Access-Control-Allow-Methods"] = ",".join(sorted(self._cors_methods)) + return ResponseBuilder(Response(status_code=204, content_type=None, headers=headers, body=None)) + + return ResponseBuilder( + Response( + status_code=404, + content_type="application/json", + headers=headers, + body=json.dumps({"message": "Not found"}), + ) + ) + + def _call_route(self, route: Route, args: Dict[str, str]) -> ResponseBuilder: + """Actually call the matching route with any provided keyword arguments.""" + return ResponseBuilder(self._to_response(route.func(**args)), route) + + @staticmethod + def _to_response(result: Union[Dict, Response]) -> Response: + """Convert the route's result to a Response + + Two main result types are supported: + + - Dict[str, Any]: REST API response, where the Dict is serialized to JSON and content-type is set to + application/json + - Response: returned as is, allowing for more flexibility + """ + if isinstance(result, Response): + return result + + logger.debug("Simple response detected, serializing return before constructing final response") + return Response( + status_code=200, + content_type="application/json", + body=json.dumps(result, separators=(",", ":"), cls=Encoder), + ) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 0140d057f0d..47418063732 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -1,14 +1,18 @@ import json import logging import os -from typing import Dict, Iterable, Optional, Union +import time +from abc import ABCMeta, abstractmethod +from functools import partial +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union from ..shared import constants -STD_LOGGING_KEYS = ( +RESERVED_LOG_ATTRS = ( "name", "msg", "args", + "level", "levelname", "levelno", "pathname", @@ -27,50 +31,127 @@ "processName", "process", "asctime", + "location", + "timestamp", ) -class JsonFormatter(logging.Formatter): - """AWS Lambda Logging formatter. +class BasePowertoolsFormatter(logging.Formatter, metaclass=ABCMeta): + @abstractmethod + def append_keys(self, **additional_keys): + raise NotImplementedError() - Formats the log message as a JSON encoded string. If the message is a - dict it will be used directly. If the message can be parsed as JSON, then - the parsed value is used in the output record. + def remove_keys(self, keys: Iterable[str]): + raise NotImplementedError() - Originally taken from https://gitlab.com/hadrien/aws_lambda_logging/ - """ +class LambdaPowertoolsFormatter(BasePowertoolsFormatter): + """AWS Lambda Powertools Logging formatter. - def __init__(self, **kwargs): - """Return a JsonFormatter instance. + Formats the log message as a JSON encoded string. If the message is a + dict it will be used directly. + """ - The `json_default` kwarg is used to specify a formatter for otherwise - unserializable values. It must not throw. Defaults to a function that - coerces the value to a string. + default_time_format = "%Y-%m-%d %H:%M:%S,%F%z" # '2021-04-17 18:19:57,656+0200' + custom_ms_time_directive = "%F" + + def __init__( + self, + json_serializer: Optional[Callable[[Dict], str]] = None, + json_deserializer: Optional[Callable[[Union[str, bytes, bytearray]], Dict]] = None, + json_default: Optional[Callable[[Any], Any]] = None, + datefmt: str = None, + log_record_order: List[str] = None, + utc: bool = False, + **kwargs + ): + """Return a LambdaPowertoolsFormatter instance.
The `log_record_order` kwarg is used to specify the order of the keys used in the structured json logs. By default the order is: "level", "location", "message", "timestamp", - "service" and "sampling_rate". + "service". Other kwargs are used to specify log field format strings. + + Parameters + ---------- + json_serializer : Callable, optional + function to serialize `obj` to a JSON formatted `str`, by default json.dumps + json_deserializer : Callable, optional + function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python `obj`, + by default json.loads + json_default : Callable, optional + function to coerce unserializable values, by default str + + Only used when no custom JSON encoder is set + + datefmt : str, optional + String directives (strftime) to format log timestamp + + See https://docs.python.org/3/library/time.html#time.strftime + utc : bool, optional + set logging timestamp to UTC, by default False to continue to use local time as per stdlib + log_record_order : list, optional + set order of log keys when logging, by default ["level", "location", "message", "timestamp"] + kwargs + Key-value to be included in log messages + """ - # Set the default unserializable function, by default values will be cast as str. - self.default_json_formatter = kwargs.pop("json_default", str) - # Set the insertion order for the log messages - self.log_format = dict.fromkeys(kwargs.pop("log_record_order", ["level", "location", "message", "timestamp"])) - self.reserved_keys = ["timestamp", "level", "location"] - # Set the date format used by `asctime` - super(JsonFormatter, self).__init__(datefmt=kwargs.pop("datefmt", None)) + self.json_deserializer = json_deserializer or json.loads + self.json_default = json_default or str + self.json_serializer = json_serializer or partial(json.dumps, default=self.json_default, separators=(",", ":")) + self.datefmt = datefmt + self.utc = utc + self.log_record_order = log_record_order or ["level", "location", "message", "timestamp"] + self.log_format = dict.fromkeys(self.log_record_order) # Set the insertion order for the log messages + self.update_formatter = self.append_keys # alias to old method + + if self.utc: + self.converter = time.gmtime + + super(LambdaPowertoolsFormatter, self).__init__(datefmt=self.datefmt) + + keys_combined = {**self._build_default_keys(), **kwargs} + self.log_format.update(**keys_combined) + + def serialize(self, log: Dict) -> str: + """Serialize structured log dict to JSON str""" + return self.json_serializer(log) - self.log_format.update(self._build_root_keys(**kwargs)) + def format(self, record: logging.LogRecord) -> str: # noqa: A003 + """Format logging record as structured JSON str""" + formatted_log = self._extract_log_keys(log_record=record) + formatted_log["message"] = self._extract_log_message(log_record=record) + formatted_log["exception"], formatted_log["exception_name"] = self._extract_log_exception(log_record=record) + formatted_log["xray_trace_id"] = self._get_latest_trace_id() + formatted_log = self._strip_none_records(records=formatted_log) + + return self.serialize(log=formatted_log) + + def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -> str: + record_ts = self.converter(record.created) + if datefmt: + return time.strftime(datefmt, record_ts) + + # NOTE: Python `time.strftime` doesn't provide msec directives + # so we create a custom one (%F) and replace the logging record ts + # Another reason is that std logging doesn't support msecs after the timezone + msecs = "%03d" %
record.msecs + custom_fmt = self.default_time_format.replace(self.custom_ms_time_directive, msecs) + return time.strftime(custom_fmt, record_ts) + + def append_keys(self, **additional_keys): + self.log_format.update(additional_keys) + + def remove_keys(self, keys: Iterable[str]): + for key in keys: + self.log_format.pop(key, None) @staticmethod - def _build_root_keys(**kwargs): + def _build_default_keys(): return { "level": "%(levelname)s", "location": "%(funcName)s:%(lineno)d", "timestamp": "%(asctime)s", - **kwargs, } @staticmethod @@ -78,12 +159,8 @@ def _get_latest_trace_id(): xray_trace_id = os.getenv(constants.XRAY_TRACE_ID_ENV) return xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None - def update_formatter(self, **kwargs): - self.log_format.update(kwargs) - - @staticmethod - def _extract_log_message(log_record: logging.LogRecord) -> Union[Dict, str, bool, Iterable]: - """Extract message from log record and attempt to JSON decode it + def _extract_log_message(self, log_record: logging.LogRecord) -> Union[Dict[str, Any], str, bool, Iterable]: + """Extract message from log record and attempt to JSON decode it if str Parameters ---------- @@ -95,20 +172,19 @@ def _extract_log_message(log_record: logging.LogRecord) -> Union[Dict, str, bool message: Union[Dict, str, bool, Iterable] Extracted message """ - if isinstance(log_record.msg, dict): - return log_record.msg + message = log_record.msg + if isinstance(message, dict): + return message - message: str = log_record.getMessage() - - # Attempt to decode non-str messages e.g. msg = '{"x": "y"}' - try: - message = json.loads(log_record.msg) - except (json.decoder.JSONDecodeError, TypeError, ValueError): - pass + if isinstance(message, str): # could be a JSON string + try: + message = self.json_deserializer(message) + except (json.decoder.JSONDecodeError, TypeError, ValueError): + pass return message - def _extract_log_exception(self, log_record: logging.LogRecord) -> Optional[str]: + def _extract_log_exception(self, log_record: logging.LogRecord) -> Union[Tuple[str, str], Tuple[None, None]]: """Format traceback information, if available Parameters @@ -118,33 +194,15 @@ def _extract_log_exception(self, log_record: logging.LogRecord) -> Optional[str] Returns ------- - log_record: Optional[str] - Log record with constant traceback info + log_record: Optional[Tuple[str, str]] + Log record with constant traceback info and exception name """ if log_record.exc_info: - return self.formatException(log_record.exc_info) - - return None + return self.formatException(log_record.exc_info), log_record.exc_info[0].__name__ - def _extract_log_exception_name(self, log_record: logging.LogRecord) -> Optional[str]: - """Extract the exception name, if available + return None, None - Parameters - ---------- - log_record : logging.LogRecord - Log record to extract exception name from - - Returns - ------- - log_record: Optional[str] - Log record with exception name - """ - if log_record.exc_info: - return log_record.exc_info[0].__name__ - - return None - - def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict: + def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict[str, Any]: """Extract and parse custom and reserved log keys Parameters @@ -157,36 +215,27 @@ def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict: formatted_log: Dict Structured log as dictionary """ - record_dict = log_record.__dict__.copy() # has extra kwargs we are after - record_dict["asctime"] = self.formatTime(log_record, 
self.datefmt) + record_dict = log_record.__dict__.copy() + record_dict["asctime"] = self.formatTime(record=log_record, datefmt=self.datefmt) + extras = {k: v for k, v in record_dict.items() if k not in RESERVED_LOG_ATTRS} - formatted_log = {} + formatted_log = {**extras} - # We have to iterate over a default or existing log structure - # then replace any logging expression for reserved keys e.g. '%(level)s' to 'INFO' - # and lastly add or replace incoming keys (those added within the constructor or .structure_logs method) + # Iterate over a default or existing log structure + # then replace any std log attribute e.g. '%(level)s' to 'INFO', '%(process)d to '4773' + # lastly add or replace incoming keys (those added within the constructor or .structure_logs method) for key, value in self.log_format.items(): - if value and key in self.reserved_keys: + if value and key in RESERVED_LOG_ATTRS: formatted_log[key] = value % record_dict else: formatted_log[key] = value - # pick up extra keys when logging a new message e.g. log.info("my message", extra={"additional_key": "value"} - # these messages will be added to the root of the final structure not within `message` key - for key, value in record_dict.items(): - if key not in STD_LOGGING_KEYS: - formatted_log[key] = value - return formatted_log - def format(self, record): # noqa: A003 - formatted_log = self._extract_log_keys(log_record=record) - formatted_log["message"] = self._extract_log_message(log_record=record) - formatted_log["exception_name"] = self._extract_log_exception_name(log_record=record) - formatted_log["exception"] = self._extract_log_exception(log_record=record) - formatted_log.update({"xray_trace_id": self._get_latest_trace_id()}) # fetch latest Trace ID, if any + @staticmethod + def _strip_none_records(records: Dict[str, Any]) -> Dict[str, Any]: + """Remove any key with None as value""" + return {k: v for k, v in records.items() if v is not None} - # Filter out top level key with values that are None - formatted_log = {k: v for k, v in formatted_log.items() if v is not None} - return json.dumps(formatted_log, default=self.default_json_formatter) +JsonFormatter = LambdaPowertoolsFormatter # alias to previous formatter diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 2e9cbb78d2e..3231f30eccd 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -4,7 +4,7 @@ import os import random import sys -from typing import Any, Callable, Dict, Optional, Union +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union import jmespath @@ -12,13 +12,15 @@ from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice from .exceptions import InvalidLoggerSamplingRateError from .filters import SuppressFilter -from .formatter import JsonFormatter +from .formatter import BasePowertoolsFormatter, LambdaPowertoolsFormatter from .lambda_context import build_lambda_context_model logger = logging.getLogger(__name__) is_cold_start = True +PowertoolsFormatter = TypeVar("PowertoolsFormatter", bound=BasePowertoolsFormatter) + def _is_cold_start() -> bool: """Verifies whether is cold start @@ -42,6 +44,7 @@ def _is_cold_start() -> bool: # so we need to return to subclassing removed in #97 # All methods/properties continue to be proxied to inner logger # https://github.com/awslabs/aws-lambda-powertools-python/issues/107 +# noinspection PyRedeclaration class Logger(logging.Logger): # lgtm [py/missing-call-to-init] """Creates 
and setups a logger to format statements in JSON. @@ -69,6 +72,28 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] sample rate for debug calls within execution context defaults to 0.0 stream: sys.stdout, optional valid output for a logging stream, by default sys.stdout + logger_formatter: PowertoolsFormatter, optional + custom logging formatter that implements PowertoolsFormatter + logger_handler: logging.Handler, optional + custom logging handler e.g. logging.FileHandler("file.log") + + Parameters propagated to LambdaPowertoolsFormatter + --------------------------------------------- + datefmt: str, optional + String directives (strftime) to format log timestamp, by default it uses RFC 3339. + json_serializer : Callable, optional + function to serialize `obj` to a JSON formatted `str`, by default json.dumps + json_deserializer : Callable, optional + function to deserialize `str`, `bytes`, bytearray` containing a JSON document to a Python `obj`, + by default json.loads + json_default : Callable, optional + function to coerce unserializable values, by default `str()` + + Only used when no custom formatter is set + utc : bool, optional + set logging timestamp to UTC, by default False to continue to use local time as per stdlib + log_record_order : list, optional + set order of log keys when logging, by default ["level", "location", "message", "timestamp"] Example ------- @@ -96,7 +121,7 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] >>> logger = Logger(service="payment") >>> >>> def handler(event, context): - logger.structure_logs(append=True, payment_id=event["payment_id"]) + logger.append_keys(payment_id=event["payment_id"]) logger.info("Hello") **Create child Logger using logging inheritance via child param** @@ -110,6 +135,30 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] >>> from aws_lambda_powertools import Logger >>> logger = Logger(service="payment", child=True) + **Logging in UTC timezone** + + >>> # app.py + >>> import logging + >>> from aws_lambda_powertools import Logger + >>> + >>> logger = Logger(service="payment", utc=True) + + **Brings message as the first key in log statements** + + >>> # app.py + >>> import logging + >>> from aws_lambda_powertools import Logger + >>> + >>> logger = Logger(service="payment", log_record_order=["message"]) + + **Logging to a file instead of standard output for testing** + + >>> # app.py + >>> import logging + >>> from aws_lambda_powertools import Logger + >>> + >>> logger = Logger(service="payment", logger_handler=logging.FileHandler("log.json")) + Raises ------ InvalidLoggerSamplingRateError @@ -123,22 +172,26 @@ def __init__( child: bool = False, sampling_rate: float = None, stream: sys.stdout = None, + logger_formatter: Optional[PowertoolsFormatter] = None, + logger_handler: Optional[logging.Handler] = None, **kwargs, ): self.service = resolve_env_var_choice( choice=service, env=os.getenv(constants.SERVICE_NAME_ENV, "service_undefined") ) self.sampling_rate = resolve_env_var_choice( - choice=sampling_rate, env=os.getenv(constants.LOGGER_LOG_SAMPLING_RATE, 0.0) + choice=sampling_rate, env=os.getenv(constants.LOGGER_LOG_SAMPLING_RATE) ) + self.child = child + self.logger_formatter = logger_formatter + self.logger_handler = logger_handler or logging.StreamHandler(stream) + self.log_level = self._get_log_level(level) self._is_deduplication_disabled = resolve_truthy_env_var_choice( env=os.getenv(constants.LOGGER_LOG_DEDUPLICATION_ENV, "false") ) - self.log_level = self._get_log_level(level) 
- self.child = child - self._handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout) self._default_log_keys = {"service": self.service, "sampling_rate": self.sampling_rate} self._logger = self._get_logger() + self._init_logger(**kwargs) def __getattr__(self, name): @@ -147,7 +200,7 @@ def __getattr__(self, name): return getattr(self._logger, name) def _get_logger(self): - """ Returns a Logger named {self.service}, or {self.service.filename} for child loggers""" + """Returns a Logger named {self.service}, or {self.service.filename} for child loggers""" logger_name = self.service if self.child: logger_name = f"{self.service}.{self._get_caller_filename()}" @@ -168,7 +221,7 @@ def _init_logger(self, **kwargs): self._configure_sampling() self._logger.setLevel(self.log_level) - self._logger.addHandler(self._handler) + self._logger.addHandler(self.logger_handler) self.structure_logs(**kwargs) # Pytest Live Log feature duplicates log records for colored output @@ -269,7 +322,7 @@ def handler(event, context): def decorate(event, context): lambda_context = build_lambda_context_model(context) cold_start = _is_cold_start() - self.structure_logs(append=True, cold_start=cold_start, **lambda_context.__dict__) + self.append_keys(cold_start=cold_start, **lambda_context.__dict__) if correlation_id_path: self.set_correlation_id(jmespath.search(correlation_id_path, event)) @@ -282,30 +335,44 @@ def decorate(event, context): return decorate - def structure_logs(self, append: bool = False, **kwargs): + def append_keys(self, **additional_keys): + self.registered_formatter.append_keys(**additional_keys) + + def remove_keys(self, keys: Iterable[str]): + self.registered_formatter.remove_keys(keys) + + @property + def registered_handler(self) -> logging.Handler: + """Convenience property to access logger handler""" + handlers = self._logger.parent.handlers if self.child else self._logger.handlers + return handlers[0] + + @property + def registered_formatter(self) -> Optional[PowertoolsFormatter]: + """Convenience property to access logger formatter""" + return self.registered_handler.formatter + + def structure_logs(self, append: bool = False, **keys): """Sets logging formatting to JSON. Optionally, it can append keyword arguments - to an existing logger so it is available - across future log statements. + to an existing logger so it is available across future log statements. Last keyword argument and value wins if duplicated. Parameters ---------- append : bool, optional - [description], by default False + append keys provided to logger formatter, by default False """ - # Child loggers don't have handlers attached, use its parent handlers - handlers = self._logger.parent.handlers if self.child else self._logger.handlers - for handler in handlers: - if append: - # Update existing formatter in an existing logger handler - handler.formatter.update_formatter(**kwargs) - else: - # Set a new formatter for a logger handler - handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs)) + if append: + # Maintenance: Add deprecation warning for major version. 
Refer to append_keys() when docs are updated + self.append_keys(**keys) + else: + log_keys = {**self._default_log_keys, **keys} + formatter = self.logger_formatter or LambdaPowertoolsFormatter(**log_keys) + self.registered_handler.setFormatter(formatter) def set_correlation_id(self, value: str): """Sets the correlation_id in the logging json @@ -315,11 +382,11 @@ def set_correlation_id(self, value: str): value : str Value for the correlation id """ - self.structure_logs(append=True, correlation_id=value) + self.append_keys(correlation_id=value) @staticmethod def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: - """ Returns preferred log level set by the customer in upper case """ + """Returns preferred log level set by the customer in upper case""" if isinstance(level, int): return level @@ -331,7 +398,7 @@ def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: @staticmethod def _get_caller_filename(): - """ Return caller filename by finding the caller frame """ + """Return caller filename by finding the caller frame""" # Current frame => _get_logger() # Previous frame => logger.py # Before previous frame => Caller diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 59d3b18e0e4..8cc4895f03e 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -34,18 +34,13 @@ class Metrics(MetricManager): from aws_lambda_powertools import Metrics metrics = Metrics(namespace="ServerlessAirline", service="payment") - metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) - metrics.add_metric(name="BookingConfirmation", unit="Count", value=1) - metrics.add_dimension(name="function_version", value="$LATEST") - ... - @metrics.log_metrics() + @metrics.log_metrics(capture_cold_start_metric=True) def lambda_handler(): - do_something() - return True + metrics.add_metric(name="BookingConfirmation", unit="Count", value=1) + metrics.add_dimension(name="function_version", value="$LATEST") - def do_something(): - metrics.add_metric(name="Something", unit="Count", value=1) + return True Environment variables --------------------- @@ -74,13 +69,15 @@ def do_something(): _metrics: Dict[str, Any] = {} _dimensions: Dict[str, str] = {} _metadata: Dict[str, Any] = {} + _default_dimensions: Dict[str, Any] = {} def __init__(self, service: str = None, namespace: str = None): self.metric_set = self._metrics - self.dimension_set = self._dimensions self.service = service self.namespace: Optional[str] = namespace self.metadata_set = self._metadata + self.default_dimensions = self._default_dimensions + self.dimension_set = {**self._default_dimensions, **self._dimensions} super().__init__( metric_set=self.metric_set, @@ -90,17 +87,48 @@ def __init__(self, service: str = None, namespace: str = None): service=self.service, ) + def set_default_dimensions(self, **dimensions): + """Persist dimensions across Lambda invocations + + Parameters + ---------- + dimensions : Dict[str, Any], optional + metric dimensions as key=value + + Example + ------- + **Sets some default dimensions that will always be present across metrics and invocations** + + from aws_lambda_powertools import Metrics + + metrics = Metrics(namespace="ServerlessAirline", service="payment") + metrics.set_default_dimensions(environment="demo", another="one") + + @metrics.log_metrics() + def lambda_handler(): + return True + """ + for name, value in dimensions.items(): + self.add_dimension(name, value) + + 
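# keep a copy of the provided dimensions so `clear_metrics` can re-apply them on the next invocation +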
self.default_dimensions.update(**dimensions) + + def clear_default_dimensions(self): + self.default_dimensions.clear() + def clear_metrics(self): logger.debug("Clearing out existing metric set from memory") self.metric_set.clear() self.dimension_set.clear() self.metadata_set.clear() + self.set_default_dimensions(**self.default_dimensions) # re-add default dimensions def log_metrics( self, lambda_handler: Callable[[Any, Any], Any] = None, capture_cold_start_metric: bool = False, raise_on_empty_metrics: bool = False, + default_dimensions: Dict[str, str] = None, ): """Decorator to serialize and publish metrics at the end of a function execution. @@ -123,11 +151,13 @@ def handler(event, context): Parameters ---------- lambda_handler : Callable[[Any, Any], Any], optional - Lambda function handler, by default None + lambda function handler, by default None capture_cold_start_metric : bool, optional - Captures cold start metric, by default False + captures cold start metric, by default False raise_on_empty_metrics : bool, optional - Raise exception if no metrics are emitted, by default False + raise exception if no metrics are emitted, by default False + default_dimensions: Dict[str, str], optional + metric dimensions as key=value that will always be present Raises ------ @@ -143,11 +173,14 @@ def handler(event, context): self.log_metrics, capture_cold_start_metric=capture_cold_start_metric, raise_on_empty_metrics=raise_on_empty_metrics, + default_dimensions=default_dimensions, ) @functools.wraps(lambda_handler) def decorate(event, context): try: + if default_dimensions: + self.set_default_dimensions(**default_dimensions) response = lambda_handler(event, context) if capture_cold_start_metric: self.__add_cold_start_metric(context=context) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 5e2e545e356..47568802202 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -720,7 +720,7 @@ def __build_config( patch_modules: Union[List, Tuple] = None, provider: BaseProvider = None, ): - """ Populates Tracer config for new and existing initializations """ + """Populates Tracer config for new and existing initializations""" is_disabled = disabled if disabled is not None else self._is_tracer_disabled() is_service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV)) diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 6c7cb9e60c3..73e064d0f26 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -21,14 +21,6 @@ class ALBEvent(BaseProxyEvent): def request_context(self) -> ALBEventRequestContext: return ALBEventRequestContext(self._data) - @property - def http_method(self) -> str: - return self["httpMethod"] - - @property - def path(self) -> str: - return self["path"] - @property def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueQueryStringParameters") diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index 6c06e48e63e..1ce6a742125 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -164,7 +164,7 @@ def path(self) -> str: @property def 
stage(self) -> str: - """The deployment stage of the API request """ + """The deployment stage of the API request""" return self["requestContext"]["stage"] @property @@ -217,15 +217,6 @@ def version(self) -> str: def resource(self) -> str: return self["resource"] - @property - def path(self) -> str: - return self["path"] - - @property - def http_method(self) -> str: - """The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" - return self["httpMethod"] - @property def multi_value_headers(self) -> Dict[str, List[str]]: return self["multiValueHeaders"] @@ -361,7 +352,7 @@ def authorizer(self) -> Optional[RequestContextV2Authorizer]: @property def domain_name(self) -> str: - """A domain name """ + """A domain name""" return self["requestContext"]["domainName"] @property @@ -384,7 +375,7 @@ def route_key(self) -> str: @property def stage(self) -> str: - """The deployment stage of the API request """ + """The deployment stage of the API request""" return self["requestContext"]["stage"] @property @@ -446,3 +437,12 @@ def path_parameters(self) -> Optional[Dict[str, str]]: @property def stage_variables(self) -> Optional[Dict[str, str]]: return self.get("stageVariables") + + @property + def path(self) -> str: + return self.raw_path + + @property + def http_method(self) -> str: + """The HTTP method used. Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" + return self.request_context.http.method diff --git a/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py b/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py index dae09065568..56d37851631 100644 --- a/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py +++ b/aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py @@ -22,7 +22,7 @@ class AppSyncIdentityIAM(DictWrapper): @property def source_ip(self) -> List[str]: - """The source IP address of the caller received by AWS AppSync. """ + """The source IP address of the caller received by AWS AppSync.""" return self["sourceIp"] @property @@ -67,7 +67,7 @@ class AppSyncIdentityCognito(DictWrapper): @property def source_ip(self) -> List[str]: - """The source IP address of the caller received by AWS AppSync. """ + """The source IP address of the caller received by AWS AppSync.""" return self["sourceIp"] @property diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index 6f393cccb60..a6b975c6072 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -1,3 +1,4 @@ +import json from typing import Any, Dict, Optional @@ -57,8 +58,23 @@ def is_base64_encoded(self) -> Optional[bool]: @property def body(self) -> Optional[str]: + """Submitted body of the request as a string""" return self.get("body") + @property + def json_body(self) -> Any: + """Parses the submitted body as json""" + return json.loads(self["body"]) + + @property + def path(self) -> str: + return self["path"] + + @property + def http_method(self) -> str: + """The HTTP method used. 
Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT.""" + return self["httpMethod"] + def get_query_string_value(self, name: str, default_value: Optional[str] = None) -> Optional[str]: """Get query string value by name diff --git a/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py b/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py index 9c00922069e..bdbf9d68afa 100644 --- a/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py +++ b/aws_lambda_powertools/utilities/data_classes/event_bridge_event.py @@ -60,7 +60,7 @@ def detail_type(self) -> str: @property def detail(self) -> Dict[str, Any]: - """A JSON object, whose content is at the discretion of the service originating the event. """ + """A JSON object, whose content is at the discretion of the service originating the event.""" return self["detail"] @property diff --git a/aws_lambda_powertools/utilities/data_classes/s3_object_event.py b/aws_lambda_powertools/utilities/data_classes/s3_object_event.py index f653f7aca6e..b22434c68e3 100644 --- a/aws_lambda_powertools/utilities/data_classes/s3_object_event.py +++ b/aws_lambda_powertools/utilities/data_classes/s3_object_event.py @@ -53,7 +53,7 @@ def payload(self) -> str: class S3ObjectUserRequest(DictWrapper): - """ Information about the original call to S3 Object Lambda.""" + """Information about the original call to S3 Object Lambda.""" @property def url(self) -> str: diff --git a/aws_lambda_powertools/utilities/data_classes/sns_event.py b/aws_lambda_powertools/utilities/data_classes/sns_event.py index e96b096fe6b..84ee1c1ef0f 100644 --- a/aws_lambda_powertools/utilities/data_classes/sns_event.py +++ b/aws_lambda_powertools/utilities/data_classes/sns_event.py @@ -46,7 +46,7 @@ def message_id(self) -> str: @property def message(self) -> str: - """A string that describes the message. """ + """A string that describes the message.""" return self["Sns"]["Message"] @property diff --git a/aws_lambda_powertools/utilities/data_classes/sqs_event.py b/aws_lambda_powertools/utilities/data_classes/sqs_event.py index 778b8f56f36..0e70684cc3f 100644 --- a/aws_lambda_powertools/utilities/data_classes/sqs_event.py +++ b/aws_lambda_powertools/utilities/data_classes/sqs_event.py @@ -70,7 +70,7 @@ def binary_value(self) -> Optional[str]: @property def data_type(self) -> str: - """ The message attribute data type. Supported types include `String`, `Number`, and `Binary`.""" + """The message attribute data type. Supported types include `String`, `Number`, and `Binary`.""" return self["dataType"] @@ -120,7 +120,7 @@ def md5_of_body(self) -> str: @property def event_source(self) -> str: - """The AWS service from which the SQS record originated. For SQS, this is `aws:sqs` """ + """The AWS service from which the SQS record originated. 
For SQS, this is `aws:sqs`""" return self["eventSource"] @property diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 263414a9573..0cbd34213c1 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -110,7 +110,7 @@ class BasePersistenceLayer(ABC): """ def __init__(self): - """Initialize the defaults """ + """Initialize the defaults""" self.configured = False self.event_key_jmespath: Optional[str] = None self.event_key_compiled_jmespath = None diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py index 10c70272c7d..e6f63c4792d 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py @@ -1,3 +1,4 @@ +from .apigw import ApiGatewayEnvelope from .base import BaseEnvelope from .cloudwatch import CloudWatchLogsEnvelope from .dynamodb import DynamoDBStreamEnvelope @@ -7,6 +8,7 @@ from .sqs import SqsEnvelope __all__ = [ + "ApiGatewayEnvelope", "CloudWatchLogsEnvelope", "DynamoDBStreamEnvelope", "EventBridgeEnvelope", diff --git a/aws_lambda_powertools/utilities/parser/envelopes/apigw.py b/aws_lambda_powertools/utilities/parser/envelopes/apigw.py new file mode 100644 index 00000000000..6b74a3037e9 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/apigw.py @@ -0,0 +1,32 @@ +import logging +from typing import Any, Dict, Optional, Type, Union + +from ..models import APIGatewayProxyEventModel +from ..types import Model +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class ApiGatewayEnvelope(BaseEnvelope): + """API Gateway envelope to extract data within body key""" + + def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Optional[Model]: + """Parses the data using the model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : Type[Model] + Data model provided to parse after extracting data using envelope + + Returns + ------- + Optional[Model] + Parsed body payload with the model provided + """ + logger.debug(f"Parsing incoming data with Api Gateway model {APIGatewayProxyEventModel}") + parsed_envelope = APIGatewayProxyEventModel.parse_obj(data) + logger.debug(f"Parsing event payload in `body` with {model}") + return self._parse(data=parsed_envelope.body, model=model) diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 923d5d057c3..0e59b2197a8 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -1,4 +1,10 @@ from .alb import AlbModel, AlbRequestContext, AlbRequestContextData +from .apigw import ( + APIGatewayEventAuthorizer, + APIGatewayEventIdentity, + APIGatewayEventRequestContext, + APIGatewayProxyEventModel, +) from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel from .event_bridge import EventBridgeModel @@ -70,4 +76,8 @@ "SqsRecordModel", "SqsMsgAttributeModel", "SqsAttributesModel", + "APIGatewayProxyEventModel", + "APIGatewayEventRequestContext", + "APIGatewayEventAuthorizer", + "APIGatewayEventIdentity", ] diff --git
a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py new file mode 100644 index 00000000000..de968e20ecf --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -0,0 +1,92 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, root_validator +from pydantic.networks import IPvAnyNetwork + +from ..types import Literal + + +class ApiGatewayUserCertValidity(BaseModel): + notBefore: str + notAfter: str + + +class ApiGatewayUserCert(BaseModel): + clientCertPem: str + subjectDN: str + issuerDN: str + serialNumber: str + validity: ApiGatewayUserCertValidity + + +class APIGatewayEventIdentity(BaseModel): + accessKey: Optional[str] + accountId: Optional[str] + apiKey: Optional[str] + apiKeyId: Optional[str] + caller: Optional[str] + cognitoAuthenticationProvider: Optional[str] + cognitoAuthenticationType: Optional[str] + cognitoIdentityId: Optional[str] + cognitoIdentityPoolId: Optional[str] + principalOrgId: Optional[str] + sourceIp: IPvAnyNetwork + user: Optional[str] + userAgent: Optional[str] + userArn: Optional[str] + clientCert: Optional[ApiGatewayUserCert] + + +class APIGatewayEventAuthorizer(BaseModel): + claims: Optional[Dict[str, Any]] + scopes: Optional[List[str]] + + +class APIGatewayEventRequestContext(BaseModel): + accountId: str + apiId: str + authorizer: APIGatewayEventAuthorizer + stage: str + protocol: str + identity: APIGatewayEventIdentity + requestId: str + requestTime: str + requestTimeEpoch: datetime + resourceId: Optional[str] + resourcePath: str + domainName: Optional[str] + domainPrefix: Optional[str] + extendedRequestId: Optional[str] + httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + path: str + connectedAt: Optional[datetime] + connectionId: Optional[str] + eventType: Optional[Literal["CONNECT", "MESSAGE", "DISCONNECT"]] + messageDirection: Optional[str] + messageId: Optional[str] + routeKey: Optional[str] + operationName: Optional[str] + + +class APIGatewayProxyEventModel(BaseModel): + version: str + resource: str + path: str + httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + headers: Dict[str, str] + multiValueHeaders: Dict[str, List[str]] + queryStringParameters: Optional[Dict[str, str]] + multiValueQueryStringParameters: Optional[Dict[str, List[str]]] + requestContext: APIGatewayEventRequestContext + pathParameters: Optional[Dict[str, str]] + stageVariables: Optional[Dict[str, str]] + isBase64Encoded: bool + body: str + + @root_validator() + def check_message_id(cls, values): + message_id, event_type = values.get("messageId"), values.get("eventType") + if message_id is not None and event_type != "MESSAGE": + raise TypeError("messageId is available only when the `eventType` is `MESSAGE`") + return values diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py index 16cdc45c907..7a7f6aec56b 100644 --- a/aws_lambda_powertools/utilities/parser/parser.py +++ b/aws_lambda_powertools/utilities/parser/parser.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union from ...middleware_factory import lambda_handler_decorator from ..typing import LambdaContext @@ -10,14 +10,17 @@ logger = logging.getLogger(__name__) +EventParserReturnType = TypeVar("EventParserReturnType") + + @lambda_handler_decorator def event_parser( 
diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py
index 16cdc45c907..7a7f6aec56b 100644
--- a/aws_lambda_powertools/utilities/parser/parser.py
+++ b/aws_lambda_powertools/utilities/parser/parser.py
@@ -1,5 +1,5 @@
 import logging
-from typing import Any, Callable, Dict, Optional
+from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union
 
 from ...middleware_factory import lambda_handler_decorator
 from ..typing import LambdaContext
@@ -10,14 +10,17 @@
 logger = logging.getLogger(__name__)
 
 
+EventParserReturnType = TypeVar("EventParserReturnType")
+
+
 @lambda_handler_decorator
 def event_parser(
-    handler: Callable[[Any, LambdaContext], Any],
+    handler: Callable[[Any, LambdaContext], EventParserReturnType],
     event: Dict[str, Any],
     context: LambdaContext,
-    model: Model,
-    envelope: Optional[Envelope] = None,
-) -> Any:
+    model: Type[Model],
+    envelope: Optional[Union[Envelope, Type[Envelope]]] = None,
+) -> EventParserReturnType:
     """Lambda handler decorator to parse & validate events using Pydantic models
 
     It requires a model that implements Pydantic BaseModel to parse & validate the event.
@@ -83,7 +86,9 @@ def handler(event: Order, context: LambdaContext):
     return handler(parsed_event, context)
 
 
-def parse(event: Dict[str, Any], model: Model, envelope: Optional[Envelope] = None) -> Model:
+def parse(
+    event: Dict[str, Any], model: Type[Model], envelope: Optional[Union[Envelope, Type[Envelope]]] = None
+) -> Model:
     """Standalone function to parse & validate events using Pydantic models
 
     Typically used when you need fine-grained control over error handling compared to event_parser decorator.
diff --git a/aws_lambda_powertools/utilities/validation/validator.py b/aws_lambda_powertools/utilities/validation/validator.py
index c962f8fff76..3628d486eb3 100644
--- a/aws_lambda_powertools/utilities/validation/validator.py
+++ b/aws_lambda_powertools/utilities/validation/validator.py
@@ -132,7 +132,7 @@ def handler(event, context):
 
 def validate(
-    event: Dict,
+    event: Any,
     schema: Dict,
     formats: Optional[Dict] = None,
     envelope: str = None,
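Since `validate` now accepts `event: Any`, callers can pass event shapes beyond plain dictionaries. A minimal sketch of the standalone function; the schema below is illustrative only and not part of this change:

```python
from aws_lambda_powertools.utilities.validation import validate
from aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError

# illustrative JSON Schema, not part of this change
INBOUND_SCHEMA = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {"message": {"type": "string"}},
    "required": ["message"],
}


def handler(event, context):
    try:
        validate(event=event, schema=INBOUND_SCHEMA)
    except SchemaValidationError:
        # fine-grained control over validation failures
        return {"statusCode": 400}
    return {"statusCode": 200}
```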
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
new file mode 100644
index 00000000000..c3c449338b5
--- /dev/null
+++ b/docs/core/event_handler/api_gateway.md
@@ -0,0 +1,711 @@
+---
+title: API Gateway
+description: Core utility
+---
+
+Event handler for Amazon API Gateway REST/HTTP APIs and Application Load Balancer (ALB).
+
+### Key Features
+
+* Lightweight routing to reduce boilerplate for API Gateway REST/HTTP API and ALB
+* Seamless support for CORS, binary and Gzip compression
+* Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to easily access event and identity information
+* Built-in support for Decimals JSON encoding
+* Support for dynamic path expressions
+
+## Getting started
+
+### Required resources
+
+You must have an existing [API Gateway Proxy integration](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html){target="_blank"} or [ALB](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html){target="_blank"} configured to invoke your Lambda function. No additional permissions or dependencies are required to use this utility.
+
+This is the sample infrastructure for API Gateway we are using for the examples in this documentation.
+
+=== "template.yml"
+
+    ```yaml
+    AWSTemplateFormatVersion: '2010-09-09'
+    Transform: AWS::Serverless-2016-10-31
+    Description: Hello world event handler API Gateway
+
+    Globals:
+      Api:
+        TracingEnabled: true
+        Cors:  # see CORS section
+          AllowOrigin: "'https://example.com'"
+          AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'"
+          MaxAge: "'300'"
+        BinaryMediaTypes:  # see Binary responses section
+          - '*~1*'  # converts to */* for any binary type
+      Function:
+        Timeout: 5
+        Runtime: python3.8
+        Tracing: Active
+        Environment:
+          Variables:
+            LOG_LEVEL: INFO
+            POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1
+            POWERTOOLS_LOGGER_LOG_EVENT: true
+            POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication
+            POWERTOOLS_SERVICE_NAME: hello
+
+    Resources:
+      HelloWorldFunction:
+        Type: AWS::Serverless::Function
+        Properties:
+          Handler: app.lambda_handler
+          CodeUri: hello_world
+          Description: Hello World function
+          Events:
+            HelloUniverse:
+              Type: Api
+              Properties:
+                Path: /hello
+                Method: GET
+            HelloYou:
+              Type: Api
+              Properties:
+                Path: /hello/{name}  # see Dynamic routes section
+                Method: GET
+            CustomMessage:
+              Type: Api
+              Properties:
+                Path: /{message}/{name}  # see Dynamic routes section
+                Method: GET
+
+    Outputs:
+      HelloWorldApigwURL:
+        Description: "API Gateway endpoint URL for Prod environment for Hello World Function"
+        Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello"
+
+      HelloWorldFunction:
+        Description: "Hello World Lambda Function ARN"
+        Value: !GetAtt HelloWorldFunction.Arn
+    ```
+
+### API Gateway decorator
+
+You can define your functions to match a path and HTTP method by decorating them with the `ApiGatewayResolver` route decorators.
+
+Here's an example with a function that resolves the `/hello` path:
+
+!!! info "We automatically serialize `Dict` responses as JSON and set content-type to `application/json`"
+
+=== "app.py"
+
+    ```python hl_lines="3 7 9 12 18"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()  # by default API Gateway REST API (v1)
+
+    @app.get("/hello")
+    @tracer.capture_method
+    def get_hello_universe():
+        return {"message": "hello universe"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+=== "hello_event.json"
+
+    This utility uses `path` and `httpMethod` to route to the right function. This helps make unit tests and local invocation easier too.
+ + ```json hl_lines="4-5" + { + "body": "hello", + "resource": "/hello", + "path": "/hello", + "httpMethod": "GET", + "isBase64Encoded": false, + "queryStringParameters": { + "foo": "bar" + }, + "multiValueQueryStringParameters": {}, + "pathParameters": { + "hello": "/hello" + }, + "stageVariables": {}, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https" + }, + "multiValueHeaders": {}, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "Prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "25/Jul/2020:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "identity": { + "cognitoIdentityPoolId": null, + "accountId": null, + "cognitoIdentityId": null, + "caller": null, + "accessKey": null, + "sourceIp": "127.0.0.1", + "cognitoAuthenticationType": null, + "cognitoAuthenticationProvider": null, + "userArn": null, + "userAgent": "Custom User Agent String", + "user": null + }, + "path": "/Prod/hello", + "resourcePath": "/hello", + "httpMethod": "POST", + "apiId": "1234567890", + "protocol": "HTTP/1.1" + } + } + ``` + +=== "response.json" + + ```json + { + "statusCode": 200, + "headers": { + "Content-Type": "application/json" + }, + "body": "{\"message\":\"hello universe\"}", + "isBase64Encoded": false + } + ``` + +#### HTTP API + +When using API Gateway HTTP API to front your Lambda functions, you can instruct `ApiGatewayResolver` to conform with their contract via `proxy_type` param: + +=== "app.py" + + ```python hl_lines="3 7" + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, ProxyEventType + + tracer = Tracer() + logger = Logger() + app = ApiGatewayResolver(proxy_type=ProxyEventType.http_api_v2) + + @app.get("/hello") + @tracer.capture_method + def get_hello_universe(): + return {"message": "hello universe"} + + # You can continue to use other utilities just as before + @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + +#### ALB + +When using ALB to front your Lambda functions, you can instruct `ApiGatewayResolver` to conform with their contract via `proxy_type` param: + +=== "app.py" + + ```python hl_lines="3 7" + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, ProxyEventType + + tracer = Tracer() + logger = Logger() + app = ApiGatewayResolver(proxy_type=ProxyEventType.alb_event) + + 
+    @app.get("/hello")
+    @tracer.capture_method
+    def get_hello_universe():
+        return {"message": "hello universe"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+### Dynamic routes
+
+You can use `/path/<dynamic_value>` when configuring dynamic URL paths. This allows you to receive the dynamic value as part of your function signature.
+
+=== "app.py"
+
+    ```python hl_lines="9 11"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()
+
+    @app.get("/hello/<name>")
+    @tracer.capture_method
+    def get_hello_you(name):
+        return {"message": f"hello {name}"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "sample_request.json"
+
+    ```json
+    {
+        "resource": "/hello/{name}",
+        "path": "/hello/lessa",
+        "httpMethod": "GET",
+        ...
+    }
+    ```
+
+You can also nest paths as configured earlier in [our sample infrastructure](#required-resources): `/{message}/{name}`.
+
+=== "app.py"
+
+    ```python hl_lines="9 11"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()
+
+    @app.get("/<message>/<name>")
+    @tracer.capture_method
+    def get_message(message, name):
+        return {"message": f"{message}, {name}"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "sample_request.json"
+
+    ```json
+    {
+        "resource": "/{message}/{name}",
+        "path": "/hi/michael",
+        "httpMethod": "GET",
+        ...
+    }
+    ```
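+
+Routes aren't limited to `GET`. A minimal sketch, assuming the resolver exposes matching decorators for other HTTP methods such as `post` and `delete`, mirroring `get`:
+
+```python
+from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+app = ApiGatewayResolver()
+
+@app.post("/todos")  # assumption: post decorator mirrors get
+def create_todo():
+    data: dict = app.current_event.json_body  # deserialized request payload
+    return {"created": data.get("title")}
+
+@app.delete("/todos/<todo_id>")  # assumption: delete decorator mirrors get
+def delete_todo(todo_id: str):
+    return {"deleted": todo_id}
+
+def lambda_handler(event, context):
+    return app.resolve(event, context)
+```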
+
+### Accessing request details
+
+By integrating with [Data classes utilities](../../utilities/data_classes.md){target="_blank"}, you have access to request details, Lambda context and also some convenient methods.
+
+These are made available as properties of your `ApiGatewayResolver` instance, for example `app.current_event` and `app.lambda_context`.
+
+#### Query strings and payload
+
+Within the `app.current_event` property, you can access query strings as a dictionary via `query_string_parameters`, or by name via the `get_query_string_value` method.
+
+You can access the raw payload via the `body` property, or if it's a JSON string you can quickly deserialize it via the `json_body` property.
+
+=== "app.py"
+
+    ```python hl_lines="7-9 11"
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    app = ApiGatewayResolver()
+
+    @app.get("/hello")
+    def get_hello_you():
+        query_strings_as_dict = app.current_event.query_string_parameters
+        json_payload = app.current_event.json_body
+        payload = app.current_event.body
+
+        name = app.current_event.get_query_string_value(name="name", default_value="")
+        return {"message": f"hello {name}"}
+
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+#### Headers
+
+Similarly to [query strings](#query-strings-and-payload), you can access headers as a dictionary via `app.current_event.headers`, or by name via `get_header_value`.
+
+=== "app.py"
+
+    ```python hl_lines="7-8"
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    app = ApiGatewayResolver()
+
+    @app.get("/hello")
+    def get_hello_you():
+        headers_as_dict = app.current_event.headers
+        name = app.current_event.get_header_value(name="X-Name", default_value="")
+
+        return {"message": f"hello {name}"}
+
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+## Advanced
+
+### CORS
+
+You can configure CORS at the `ApiGatewayResolver` constructor via the `cors` parameter using the `CORSConfig` class.
+
+This will ensure that CORS headers are always returned as part of the response when your functions match the path invoked.
+
+=== "app.py"
+
+    ```python hl_lines="9 11"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, CORSConfig
+
+    tracer = Tracer()
+    logger = Logger()
+
+    cors_config = CORSConfig(allow_origin="https://www.example.com", max_age=300)
+    app = ApiGatewayResolver(cors=cors_config)
+
+    @app.get("/hello/<name>")
+    @tracer.capture_method
+    def get_hello_you(name):
+        return {"message": f"hello {name}"}
+
+    @app.get("/hello", cors=False)  # optionally exclude CORS from response, if needed
+    @tracer.capture_method
+    def get_hello_no_cors_needed():
+        return {"message": "hello, no CORS needed for this path ;)"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "response.json"
+
+    ```json
+    {
+        "statusCode": 200,
+        "headers": {
+            "Content-Type": "application/json",
+            "Access-Control-Allow-Origin": "https://www.example.com",
+            "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key"
+        },
+        "body": "{\"message\":\"hello lessa\"}",
+        "isBase64Encoded": false
+    }
+    ```
+
+=== "response_no_cors.json"
+
+    ```json
+    {
+        "statusCode": 200,
+        "headers": {
+            "Content-Type": "application/json"
+        },
+        "body": "{\"message\":\"hello lessa\"}",
+        "isBase64Encoded": false
+    }
+    ```
+
+!!! tip "Optionally disable CORS on a per-path basis with the `cors=False` parameter"
+
+#### Pre-flight
+
+Pre-flight (OPTIONS) calls are typically handled at the API Gateway level as per [our sample infrastructure](#required-resources), so no Lambda integration is necessary. However, ALB expects you to handle pre-flight requests.
+
+For convenience, we automatically handle that for you as long as you [set up CORS at the constructor level](#cors), as the sketch below shows.
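+
+Putting the two together, a minimal sketch of an ALB-fronted function where pre-flight `OPTIONS` requests are answered automatically because CORS is configured at the constructor. Member names are assumed to follow the `ProxyEventType` enum and `CORSConfig` class defined in this change:
+
+```python
+from aws_lambda_powertools.event_handler.api_gateway import (
+    ApiGatewayResolver,
+    CORSConfig,
+    ProxyEventType,
+)
+
+cors_config = CORSConfig(allow_origin="https://www.example.com")
+app = ApiGatewayResolver(proxy_type=ProxyEventType.ALBEvent, cors=cors_config)
+
+@app.get("/hello")
+def get_hello():
+    return {"message": "hello"}
+
+def lambda_handler(event, context):
+    # OPTIONS pre-flight requests for /hello are handled without a user-defined route
+    return app.resolve(event, context)
+```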
+
+#### Defaults
+
+For convenience, these are the default values when using `CORSConfig` to enable CORS:
+
+!!! warning "Always configure `allow_origin` when using in production"
+
+Key | Value | Note
+------------------------------------------------- | --------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------
+**[allow_origin](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank"}**: `str` | `*` | Only use the default value for development. **Never use `*` for production** unless your use case requires it
+**[allow_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers){target="_blank"}**: `List[str]` | `[Authorization, Content-Type, X-Amz-Date, X-Api-Key, X-Amz-Security-Token]` | Additional headers will be appended to the default list for your convenience
+**[expose_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers){target="_blank"}**: `List[str]` | `[]` | Any additional header beyond the [safe listed by CORS specification](https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_response_header){target="_blank"}.
+**[max_age](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age){target="_blank"}**: `int` | `` | Only for pre-flight requests if you choose to have your function handle it instead of API Gateway
+**[allow_credentials](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials){target="_blank"}**: `bool` | `False` | Only necessary when you need to expose cookies, authorization headers or TLS client certificates.
+
+### Fine grained responses
+
+You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-Type.
+
+=== "app.py"
+
+    ```python hl_lines="12-17"
+    import json
+
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response
+
+    app = ApiGatewayResolver()
+
+    @app.get("/hello")
+    def get_hello_you():
+        payload = json.dumps({"message": "I'm a teapot"})
+        custom_headers = {"X-Custom": "X-Value"}
+
+        return Response(
+            status_code=418,
+            content_type="application/json",
+            body=payload,
+            headers=custom_headers,
+        )
+
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "response.json"
+
+    ```json
+    {
+        "body": "{\"message\":\"I\'m a teapot\"}",
+        "headers": {
+            "Content-Type": "application/json",
+            "X-Custom": "X-Value"
+        },
+        "isBase64Encoded": false,
+        "statusCode": 418
+    }
+    ```
+
+### Compress
+
+You can gzip-compress and base64-encode your responses via the `compress` parameter.
+
+!!! warning "The client must send the `Accept-Encoding` header, otherwise a normal response will be sent"
+
+=== "app.py"
+
+    ```python hl_lines="5 7"
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    app = ApiGatewayResolver()
+
+    @app.get("/hello", compress=True)
+    def get_hello_you():
+        return {"message": "hello universe"}
+
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "sample_request.json"
+
+    ```json
+    {
+        "headers": {
+            "Accept-Encoding": "gzip"
+        },
+        "httpMethod": "GET",
+        "path": "/hello",
+        ...
+ } + ``` + +=== "response.json" + + ```json + { + "body": "H4sIAAAAAAACE6tWyk0tLk5MT1WyUspIzcnJVyjNyyxLLSpOVaoFANha8kEcAAAA", + "headers": { + "Content-Encoding": "gzip", + "Content-Type": "application/json" + }, + "isBase64Encoded": true, + "statusCode": 200 + } + ``` + +### Binary responses + +For convenience, we automatically base64 encode binary responses. You can also use in combination with `compress` parameter if your client supports gzip. + +Like `compress` feature, the client must send the `Accept` header with the correct media type. + +!!! warning "This feature requires API Gateway to configure binary media types, see [our sample infrastructure](#required-resources) for reference" + +=== "app.py" + + ```python hl_lines="4 7 11" + import os + from pathlib import Path + + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response + + app = ApiGatewayResolver() + logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() + + @app.get("/logo") + def get_logo(): + return Response(status_code=200, content_type="image/svg+xml", body=logo_file) + + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + +=== "logo.svg" + ```xml + + + + + + + + + + + + + ``` +=== "sample_request.json" + + ```json + { + "headers": { + "Accept": "image/svg+xml" + }, + "httpMethod": "GET", + "path": "/logo", + ... + } + ``` + +=== "response.json" + + ```json + { + "body": "H4sIAAAAAAACE3VXa2scRxD87ID/w+byKTCzN899yFZMLBLHYEMg4K9BHq0l4c2duDudZIf891TVrPwiMehmd+fR3dXV1eOnz+7/mpvjtNtfbzenK9+6VTNtyvbienN5uro9vLPD6tlPj797+r21zYtpM+3OD9vdSfPzxfbt1Lyc59v9QZ8aP7au9ab5482L5pf7m+3u0Pw+317al5um1cc31chJ07XONc9vr+eLxv3YNNby/P3x8ks3/Kq5vjhdvTr/MO3+xAu83OxPV1eHw83Jen13d9fexXa7u1wH59wam5clJ/fz9eb9fy304ziuNYulpyt3c79qPtTx8XePmuP1dPd8y4nGNdGlxg9h1ewPH+bpdDVtzt/Ok317Xt5f7ra3m4uTzXTXfLHyicyf7G/OC5bf7Kb9tDtOKwXGI5rDhxtMHKb7w7rs95x41O4P7u931/N88sOv+vfkn/rV66vd3c7TyXScNtuLiydlvr75+su3O5+uZYkmL3n805vzw1VT5vM9cIOpVQM8Xw9dm0yHn+JMbHvj+IoRiJuhHYtrBxPagPfBpLbDmmD6NuB7NpxzWttpDG3EKd46vAfr29HE2XZtxMYABx4VzIxY2VmvnaMN2jkW642zAdPZRkyms76DndGZPpthgEt9MvB0wEJM91gacUpsvc3c3eO4sYXJHuf52A42jNjEp2qXRzjrMzaENtngLGOwCS4krO7xzXscoIeR4WFLNpFbEo7GNrhdOhkEGElrgUyCx3gokQYAHMOLxjvFVY1XVDNQy0AKkx4PgPSIjcALv8QDf0He9NZ3BaEFhTdgInESMPKBMwAemzxTZT1zgFP5vRekOJTg8zucquEvCULsXOx1hjY5bWKuAh1fFkbuIGABa71+4cuRcMHfuiboMB6Kw8gGW5mQtDUwBa1f4s/Kd6+1iD8oplyIvq9oebEFYBOKsXi+ORNEJBKLbBhaXzIcZ0YGbgMF9IAkdG9I4Y/N65RhaYCLi+morPSipK8RMlmdIgahbFR+s2UF+Gpe3ieip6/kayCbkHpYRUp6QgH6MGFEgLuiFQHbviLO/DkdEGkbk4ljsawtR7J1zIAFk0aTioBBpIQYbmWNJArqKQlXxh9UoSQXjZxFIGoGFmzSPM/8FD+w8IDNmxG+l1pwlr5Ey/rwzP1gay1mG5Ykj6/GrpoIRZOMYqR3GiudHijAFJPJiePVCGBr2mIlE0bEUKpIMFrQwjCEcQabB4pOmJVyPolCYWEnYJZVyU+VE4JrQC56cPWtpfSVHfhkJD60RDy6foYyRNv1NZlCXoh/YwM05C7rEU0sitKERehqrLkiYCrhvcSO53VFrzxeAqB0UxHzbMFPb/q+1ltVRoITiTnNKRWm0ownRlbpFUu/iI5uYRMEoMb/kLt+yR3BSq98xtkQXElWl5h1yg6nvcz5SrVFta1UHTz3v4koIEzIVPgRKlkkc44ykipJsip7kVMWdICDFPBMMoOwUhlbRb23NX/UjqHYesi4sK2OmDhaWpLKiE1YzxbCsUhATZUlb2q7iBX7Kj/Kc80atEz66yWyXorhGTIkRqnrSURu8fWhdNIFKT7B8UnNJPIUwYLgLVHkOD7knC4rjNpFeturrBRRbmtHkpTh5VVIncmBnYlpjhT3HhMUd1urK0rQE7AE14goJdFRWBYZHyUIcLLm3AuhwF5qO7Zg4B+KTodiJCaSOMN4SXbRC+pR1Vs8FEZGOcnCtKvNvnC/aoiKj2+dekO1GdS4VMfAQo2++KXOonIgf5ifoo6hOkm6EFDP8pItNXvVpFNdxiNErThVXG1UQXHEz/eEYWk/jEmCRcyyaKtWKbVSr1YNc6rytcLnq6AORazytbMa9nqOutgYdUPmGL72nyKmlzxMVcjpPLPdE7cC1MlQQkpyZHasjPbRFVpJ+mNPqlcln6Tekk5lg7cd/9CbJMkkXFInSmrcw4PHQS1p0HZSANa6s8CqNiN/Qh7hI0vVfK7aj6u1Lnq67n173/P1vhd6Nf+ETgJLgSyjjYGpj2SVD3JM96PM+xRRZYcMtV8NJHKn3bW+pUydGMFg1CMelUSIgjwj4nGUVULDxxJJM1z
vsM/q0uZ5TQggwFnoRanI9h76gcSJDPYLz5dA/y/EgXnygRcGostStqFXv0KdD7qP6MYUTKVXr1uhEzty8QP5plqDXbZuk1mtuUZGv3jtg8JIFKHTJrt6H9AduN4TAE6q95qzMEikMmkVRq+bKQXrC0cfUrdm7h5+8b8YjP8Cgadmu5INAAA=",
+        "headers": {
+            "Content-Type": "image/svg+xml"
+        },
+        "isBase64Encoded": true,
+        "statusCode": 200
+    }
+    ```
+
+## Testing your code
+
+You can test your routes by passing a proxy event request with `path` and `httpMethod` set.
+
+=== "test_app.py"
+
+    ```python hl_lines="18-24"
+    from dataclasses import dataclass
+
+    import pytest
+    import app
+
+    @pytest.fixture
+    def lambda_context():
+        @dataclass
+        class LambdaContext:
+            function_name: str = "test"
+            memory_limit_in_mb: int = 128
+            invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test"
+            aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72"
+
+        return LambdaContext()
+
+    def test_lambda_handler(lambda_context):
+        minimal_event = {
+            "path": "/hello",
+            "httpMethod": "GET",
+            "requestContext": {  # correlation ID
+                "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef"
+            }
+        }
+
+        app.lambda_handler(minimal_event, lambda_context)
+    ```
+
+=== "app.py"
+
+    ```python
+    from aws_lambda_powertools import Logger
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    logger = Logger()
+    app = ApiGatewayResolver()  # by default API Gateway REST API (v1)
+
+    @app.get("/hello")
+    def get_hello_universe():
+        return {"message": "hello universe"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+## FAQ
+
+**What's the difference between this utility and frameworks like Chalice?**
+
+Chalice is a full featured microframework that manages application and infrastructure. This utility, however, is largely focused on routing to reduce boilerplate and expects you to set up and manage infrastructure with your framework of choice.
+
+That said, [Chalice has native integration with Lambda Powertools](https://aws.github.io/chalice/topics/middleware.html){target="_blank"} if you're looking for a more opinionated and web framework feature set.
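+
+**How can I assert the response my route returned?**
+
+`app.resolve` returns the proxy response as a dictionary, so you can assert on its fields directly. A minimal sketch building on the testing example above; the status code and header casing shown are assumptions based on the resolver in this change:
+
+```python
+def test_get_hello(lambda_context):
+    minimal_event = {
+        "path": "/hello",
+        "httpMethod": "GET",
+        "requestContext": {"requestId": "test-id"},  # illustrative correlation ID
+    }
+
+    result = app.lambda_handler(minimal_event, lambda_context)
+
+    assert result["statusCode"] == 200
+    assert result["headers"]["Content-Type"] == "application/json"
+    assert "hello universe" in result["body"]
+```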
diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index 3f61a4ad311..67ad1999285 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -7,8 +7,6 @@ Event handler for AWS AppSync Direct Lambda Resolver and Amplify GraphQL Transfo ### Key Features - - * Automatically parse API arguments to function arguments * Choose between strictly match a GraphQL field name or all of them to a function * Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to access resolver and identity information diff --git a/docs/core/logger.md b/docs/core/logger.md index ae842d6a613..a544bf91e4b 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -44,17 +44,19 @@ Setting | Description | Environment variable | Constructor parameter ### Standard structured keys -Your Logger will include the following keys to your structured logging, by default: - -Key | Type | Example | Description -------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- -**timestamp** | str | "2020-05-24 18:17:33,774" | Timestamp of actual log statement -**level** | str | "INFO" | Logging level -**location** | str | "collect.handler:1" | Source code location where statement was executed -**service** | str | "payment" | Service name defined. "service_undefined" will be used if unknown -**sampling_rate** | int | 0.1 | Debug logging sampling rate in percentage e.g. 10% in this case -**message** | any | "Collecting payment" | Log statement value. Unserializable JSON values will be casted to string -**xray_trace_id** | str | "1-5759e988-bd862e3fe1be46a994272793" | X-Ray Trace ID when Lambda function has enabled Tracing +Your Logger will include the following keys to your structured logging: + +Key | Example | Note +------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- +**level**: `str` | `INFO` | Logging level +**location**: `str` | `collect.handler:1` | Source code location where statement was executed +**message**: `Any` | `Collecting payment` | Unserializable JSON values are casted as `str` +**timestamp**: `str` | `2021-05-03 10:20:19,650+0200` | Timestamp with milliseconds, by default uses local timezone +**service**: `str` | `payment` | Service name defined, by default `service_undefined` +**xray_trace_id**: `str` | `1-5759e988-bd862e3fe1be46a994272793` | When [tracing is enabled](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html){target="_blank"}, it shows X-Ray Trace ID +**sampling_rate**: `float` | `0.1` | When enabled, it shows sampling rate in percentage e.g. 10% +**exception_name**: `str` | `ValueError` | When `logger.exception` is used and there is an exception +**exception**: `str` | `Traceback (most recent call last)..` | When `logger.exception` is used and there is an exception ### Capturing Lambda context info @@ -65,12 +67,12 @@ You can enrich your structured logs with key Lambda context information via `inj ```python hl_lines="5" from aws_lambda_powertools import Logger - logger = Logger() + logger = Logger(service="payment") @logger.inject_lambda_context def handler(event, context): - logger.info("Collecting payment") - ... 
+ logger.info("Collecting payment") + # You can log entire objects too logger.info({ "operation": "collect_payment", @@ -81,47 +83,45 @@ You can enrich your structured logs with key Lambda context information via `inj === "Example CloudWatch Logs excerpt" - ```json hl_lines="6-10 26-27" + ```json hl_lines="7-11 16-19" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, - "sampling_rate": 0.0, - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:7", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" }, { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:15", - "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, - "sampling_rate": 0.0, - "message": { - "operation": "collect_payment", - "charge_id": "ch_AZFlk2345C0" - } + "level": "INFO", + "location": "collect.handler:10", + "message": { + "operation": "collect_payment", + "charge_id": "ch_AZFlk2345C0" + }, + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72" } ``` When used, this will include the following keys: -Key | Type | Example -------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- -**cold_start**| bool | false -**function_name**| str | "example-powertools-HelloWorldFunction-1P1Z6B39FLU73" -**function_memory_size**| int | 128 -**function_arn**| str | "arn:aws:lambda:eu-west-1:012345678910:function:example-powertools-HelloWorldFunction-1P1Z6B39FLU73" -**function_request_id**| str | "899856cb-83d1-40d7-8611-9e78f15f32f4" +Key | Example +------------------------------------------------- | --------------------------------------------------------------------------------- +**cold_start**: `bool` | `false` +**function_name** `str` | `example-powertools-HelloWorldFunction-1P1Z6B39FLU73` +**function_memory_size**: `int` | `128` +**function_arn**: `str` | `arn:aws:lambda:eu-west-1:012345678910:function:example-powertools-HelloWorldFunction-1P1Z6B39FLU73` +**function_request_id**: `str` | `899856cb-83d1-40d7-8611-9e78f15f32f4` #### Logging incoming event @@ -135,7 +135,7 @@ When debugging in non-production environments, you can instruct Logger to log th ```python hl_lines="5" from aws_lambda_powertools import Logger - logger = Logger() + logger = Logger(service="payment") @logger.inject_lambda_context(log_event=True) def handler(event, context): @@ -144,21 +144,18 @@ When debugging in non-production 
environments, you can instruct Logger to log th #### Setting a Correlation ID -> New in 1.12.0 - You can set a Correlation ID using `correlation_id_path` param by passing a [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank"}. === "collect.py" - ```python hl_lines="6" + ```python hl_lines="5" from aws_lambda_powertools import Logger - logger = Logger() + logger = Logger(service="payment") @logger.inject_lambda_context(correlation_id_path="headers.my_request_id_header") def handler(event, context): logger.info("Collecting payment") - ... ``` === "Example Event" @@ -173,15 +170,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME === "Example CloudWatch Logs excerpt" - ```json hl_lines="7" + ```json hl_lines="12" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "sampling_rate": 0.0, - "correlation_id": "correlation_id_value", - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:7", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + "correlation_id": "correlation_id_value" } ``` @@ -189,16 +190,15 @@ We provide [built-in JMESPath expressions](#built-in-correlation-id-expressions) === "collect.py" - ```python hl_lines="2" + ```python hl_lines="2 6" from aws_lambda_powertools import Logger from aws_lambda_powertools.logging import correlation_paths - logger = Logger() + logger = Logger(service="payment") @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) def handler(event, context): logger.info("Collecting payment") - ... ``` === "Example Event" @@ -213,67 +213,73 @@ We provide [built-in JMESPath expressions](#built-in-correlation-id-expressions) === "Example CloudWatch Logs excerpt" - ```json hl_lines="7" + ```json hl_lines="12" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "sampling_rate": 0.0, - "correlation_id": "correlation_id_value", - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:8", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + "correlation_id": "correlation_id_value" } ``` ### Appending additional keys +!!! info "Keys might be persisted across invocations" + Always set additional keys as part of your handler to ensure they have the latest value. Additional keys are kept in memory as part of a Logger instance and might be reused in non-cold start scenarios. + You can append additional keys using either mechanism: -* Persist new keys across all future log messages via `structure_logs` method +* Persist new keys across all future log messages via `append_keys` method * Add additional keys on a per log message basis via `extra` parameter -#### structure_logs method +#### append_keys method -You can append your own keys to your existing Logger via `structure_logs(append=True, **kwargs)` method. 
+> NOTE: `append_keys` replaces `structure_logs(append=True, **kwargs)` method. Both will continue to work until the next major version. -> Omitting `append=True` will reset the existing structured logs to standard keys + keys provided as arguments +You can append your own keys to your existing Logger via `append_keys(**additional_key_values)` method. === "collect.py" - ```python hl_lines="7" + ```python hl_lines="9" from aws_lambda_powertools import Logger - logger = Logger() + logger = Logger(service="payment") def handler(event, context): order_id = event.get("order_id") - logger.structure_logs(append=True, order_id=order_id) - logger.info("Collecting payment") - ... + + # this will ensure order_id key always has the latest value before logging + logger.append_keys(order_id=order_id) + + logger.info("Collecting payment") ``` === "Example CloudWatch Logs excerpt" ```json hl_lines="7" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "sampling_rate": 0.0, - "order_id": "order_id_value", - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:11", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "order_id": "order_id_value" } ``` !!! tip "Logger will automatically reject any key with a None value" - If you conditionally add keys depending on the payload, you can use the highlighted line above as an example. + If you conditionally add keys depending on the payload, you can follow the example above. - This example will add `order_id` if its value is not empty, and in subsequent invocations where `order_id` might not be present it'll remove it from the logger. + This example will add `order_id` if its value is not empty, and in subsequent invocations where `order_id` might not be present it'll remove it from the Logger. #### extra parameter -> New in 1.10.0 - Extra parameter is available for all log levels' methods, as implemented in the standard logging library - e.g. `logger.info, logger.warning`. It accepts any dictionary, and all keyword arguments will be added as part of the root structure of the logs for that log statement. @@ -283,30 +289,28 @@ It accepts any dictionary, and all keyword arguments will be added as part of th === "extra_parameter.py" ```python hl_lines="6" + from aws_lambda_powertools import Logger + logger = Logger(service="payment") fields = { "request_id": "1123" } - - logger.info("Hello", extra=fields) + logger.info("Collecting payment", extra=fields) ``` === "Example CloudWatch Logs excerpt" ```json hl_lines="7" { - "timestamp": "2021-01-12 14:08:12,357", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "sampling_rate": 0.0, - "request_id": "1123", - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:6", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "request_id": "1123" } ``` #### set_correlation_id method -> New in 1.12.0 - You can set a correlation_id to your existing Logger via `set_correlation_id(value)` method by passing any string value. 
=== "collect.py" @@ -314,12 +318,11 @@ You can set a correlation_id to your existing Logger via `set_correlation_id(val ```python hl_lines="6" from aws_lambda_powertools import Logger - logger = Logger() + logger = Logger(service="payment") def handler(event, context): logger.set_correlation_id(event["requestContext"]["requestId"]) logger.info("Collecting payment") - ... ``` === "Example Event" @@ -336,13 +339,12 @@ You can set a correlation_id to your existing Logger via `set_correlation_id(val ```json hl_lines="7" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "sampling_rate": 0.0, - "correlation_id": "correlation_id_value", - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:7", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "correlation_id": "correlation_id_value" } ``` @@ -354,13 +356,12 @@ Alternatively, you can combine [Data Classes utility](../utilities/data_classes. from aws_lambda_powertools import Logger from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent - logger = Logger() + logger = Logger(service="payment") def handler(event, context): event = APIGatewayProxyEvent(event) logger.set_correlation_id(event.request_context.request_id) logger.info("Collecting payment") - ... ``` === "Example Event" @@ -378,7 +379,7 @@ Alternatively, you can combine [Data Classes utility](../utilities/data_classes. { "timestamp": "2020-05-24 18:17:33,774", "level": "INFO", - "location": "collect.handler:1", + "location": "collect.handler:9", "service": "payment", "sampling_rate": 0.0, "correlation_id": "correlation_id_value", @@ -386,20 +387,58 @@ Alternatively, you can combine [Data Classes utility](../utilities/data_classes. } ``` +### Removing additional keys + +You can remove any additional key from Logger state using `remove_keys`. + +=== "collect.py" + + ```python hl_lines="9" + from aws_lambda_powertools import Logger + + logger = Logger(service="payment") + + def handler(event, context): + logger.append_keys(sample_key="value") + logger.info("Collecting payment") + + logger.remove_keys(["sample_key"]) + logger.info("Collecting payment without sample key") + ``` + +=== "Example CloudWatch Logs excerpt" + + ```json hl_lines="7" + { + "level": "INFO", + "location": "collect.handler:7", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "sample_key": "value" + }, + { + "level": "INFO", + "location": "collect.handler:10", + "message": "Collecting payment without sample key", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment" + } + ``` + ### Logging exceptions -When logging exceptions, Logger will add new keys named `exception_name` and `exception` with the full traceback as a string. +Use `logger.exception` method to log contextual information about exceptions. Logger will include `exception_name` and `exception` keys to aid troubleshooting and error enumeration. !!! tip - > New in 1.12.0 - - You can use your preferred Log Analytics tool to enumerate exceptions across all your services using `exception_name` key. + You can use your preferred Log Analytics tool to enumerate and visualize exceptions across all your services using `exception_name` key. 
-=== "logging_an_exception.py" +=== "collect.py" - ```python hl_lines="7" + ```python hl_lines="8" from aws_lambda_powertools import Logger - logger = Logger() + + logger = Logger(service="payment") try: raise ValueError("something went wrong") @@ -409,16 +448,15 @@ When logging exceptions, Logger will add new keys named `exception_name` and `ex === "Example CloudWatch Logs excerpt" - ```json + ```json hl_lines="7-8" { - "level": "ERROR", - "location": ":4", - "message": "Received an exception", - "timestamp": "2020-08-28 18:11:38,886", - "service": "service_undefined", - "sampling_rate": 0.0, - "exception_name": "ValueError", - "exception": "Traceback (most recent call last):\n File \"\", line 2, in \nValueError: something went wrong" + "level": "ERROR", + "location": "collect.handler:5", + "message": "Received an exception", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "exception_name": "ValueError", + "exception": "Traceback (most recent call last):\n File \"\", line 2, in \nValueError: something went wrong" } ``` @@ -459,16 +497,16 @@ In this example, `Logger` will create a parent logger named `payment` and a chil ### Sampling debug logs -Use sampling when you want to dynamically change your log level to DEBUG based on a **percentage of your concurrent/cold start invocations**. +Use sampling when you want to dynamically change your log level to **DEBUG** based on a **percentage of your concurrent/cold start invocations**. -You can set using `POWERTOOLS_LOGGER_SAMPLE_RATE` env var or explicitly with `sample_rate` parameter: Values range from `0.0` to `1` (100%) +You can use values ranging from `0.0` to `1` (100%) when setting `POWERTOOLS_LOGGER_SAMPLE_RATE` env var or `sample_rate` parameter in Logger. !!! tip "When is this useful?" - Take for example a sudden increase in concurrency. When looking into logs you might not have enough information, and while you can adjust log levels it might not happen again. + Let's imagine a sudden spike increase in concurrency triggered a transient issue downstream. When looking into the logs you might not have enough information, and while you can adjust log levels it might not happen again. This feature takes into account transient issues where additional debugging information can be useful. -Sampling decision happens at the Logger class initialization. This means sampling may happen significantly more or less than you expect if you have a steady low number of invocations and thus few cold starts. +Sampling decision happens at the Logger initialization. This means sampling may happen significantly more or less than depending on your traffic patterns, for example a steady low number of invocations and thus few cold starts. !!! note If you want Logger to calculate sampling upon every invocation, please open a [feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=). @@ -479,47 +517,70 @@ Sampling decision happens at the Logger class initialization. This means samplin from aws_lambda_powertools import Logger # Sample 10% of debug logs e.g. 0.1 - logger = Logger(sample_rate=0.1, level="INFO") + logger = Logger(service="payment", sample_rate=0.1) def handler(event, context): - logger.debug("Verifying whether order_id is present") - if "order_id" in event: - logger.info("Collecting payment") - ... 
+ logger.debug("Verifying whether order_id is present") + logger.info("Collecting payment") ``` === "Example CloudWatch Logs excerpt" - ```json hl_lines="3 11 25" + ```json hl_lines="2 4 12 15 25" { - "timestamp": "2020-05-24 18:17:33,774", - "level": "DEBUG", - "location": "collect.handler:1", - "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, - "sampling_rate": 0.1, - "message": "Verifying whether order_id is present" - } - + "level": "DEBUG", + "location": "collect.handler:7", + "message": "Verifying whether order_id is present", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + "sampling_rate": 0.1 + }, { - "timestamp": "2020-05-24 18:17:33,774", - "level": "INFO", - "location": "collect.handler:1", - "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, - "sampling_rate": 0.1, - "message": "Collecting payment" + "level": "INFO", + "location": "collect.handler:7", + "message": "Collecting payment", + "timestamp": "2021-05-03 11:47:12,494+0200", + "service": "payment", + "cold_start": true, + "lambda_function_name": "test", + "lambda_function_memory_size": 128, + "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + "sampling_rate": 0.1 } ``` +### LambdaPowertoolsFormatter + +Logger propagates a few formatting configurations to the built-in `LambdaPowertoolsFormatter` logging formatter. 
+ +If you prefer configuring it separately, or you'd want to bring this JSON Formatter to another application, these are the supported settings: + +Parameter | Description | Default +------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- +**`json_serializer`** | function to serialize `obj` to a JSON formatted `str` | `json.dumps` +**`json_deserializer`** | function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python obj | `json.loads` +**`json_default`** | function to coerce unserializable values, when no custom serializer/deserializer is set | `str` +**`datefmt`** | string directives (strftime) to format log timestamp | `%Y-%m-%d %H:%M:%S,%F%z`, where `%F` is a custom ms directive +**`utc`** | set logging timestamp to UTC | `False` +**`log_record_order`** | set order of log keys when logging | `["level", "location", "message", "timestamp"]` +**`kwargs`** | key-value to be included in log messages | `None` + +=== "LambdaPowertoolsFormatter.py" + + ```python hl_lines="2 4-5" + from aws_lambda_powertools import Logger + from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter + + formatter = LambdaPowertoolsFormatter(utc=True, log_record_order=["message"]) + logger = Logger(service="example", logger_formatter=formatter) + ``` + ### Migrating from other Loggers If you're migrating from other Loggers, there are few key points to be aware of: [Service parameter](#the-service-parameter), [Inheriting Loggers](#inheriting-loggers), [Overriding Log records](#overriding-log-records), and [Logging exceptions](#logging-exceptions). @@ -530,24 +591,6 @@ Service is what defines the Logger name, including what the Lambda function is r For Logger, the `service` is the logging key customers can use to search log operations for one or more functions - For example, **search for all errors, or messages like X, where service is payment**. -??? tip "Logging output example" - - ```json hl_lines="5" - { - "timestamp": "2020-05-24 18:17:33,774", - "level": "DEBUG", - "location": "collect.handler:1", - "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, - "sampling_rate": 0.1, - "message": "Verifying whether order_id is present" - } - ``` - #### Inheriting Loggers > Python Logging hierarchy happens via the dot notation: `service`, `service.child`, `service.child_2` @@ -556,7 +599,8 @@ For inheritance, Logger uses a `child=True` parameter along with `service` being For child Loggers, we introspect the name of your module where `Logger(child=True, service="name")` is called, and we name your Logger as **{service}.{filename}**. -A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger: +!!! danger + A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger: === "incorrect_logger_inheritance.py" @@ -597,39 +641,39 @@ In this case, Logger will register a Logger named `payment`, and a Logger named You might want to continue to use the same date formatting style, or override `location` to display the `package.function_name:line_number` as you previously had. 
-Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`, and `datefmt`. However, `sampling_rate` key is part of the specification and cannot be suppressed. - -!!! note "`xray_trace_id` logging key" - This key is only added if X-Ray Tracing is enabled for your Lambda function. Once enabled, this key allows the integration between CloudWatch Logs and Service Lens. +Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`. === "lambda_handler.py" - > We honour standard [logging library string formats](https://docs.python.org/3/howto/logging.html#displaying-the-date-time-in-messages). + > We honour standard [logging library string formats](https://docs.python.org/3/howto/logging.html#displaying-the-date-time-in-messages){target="_blank"}. - ```python hl_lines="4 7" + ```python hl_lines="7 10" from aws_lambda_powertools import Logger - # override default values for location and timestamp format - logger = Logger(location="[%(funcName)s] %(module)s", datefmt="%m/%d/%Y %I:%M:%S %p") + date_format = "%m/%d/%Y %I:%M:%S %p" + location_format = "[%(funcName)s] %(module)s" - # suppress location key - logger = Logger(stream=stdout, location=None) + # override location and timestamp format + logger = Logger(service="payment", location=location_format, datefmt=date_format) + + # suppress the location key with a None value + logger_two = Logger(service="payment", location=None) + + logger.info("Collecting payment") ``` === "Example CloudWatch Logs excerpt" ```json hl_lines="3 5" { "level": "INFO", - "location": "[] scratch", - "message": "hello world", + "location": "[] lambda_handler", + "message": "Collecting payment", "timestamp": "02/09/2021 09:25:17 AM", - "service": "service_undefined", - "sampling_rate": 0.0 + "service": "payment" } ``` +#### Reordering log keys position -##### Reordering log records position - -You can also change the order of the following log record keys via the `log_record_order` parameter: `level`, `location`, `message`, `xray_trace_id`, and `timestamp` +You can change the order of [standard Logger keys](#standard-structured-keys) or any keys that will be appended later at runtime via the `log_record_order` parameter. === "lambda_handler.py" @@ -637,10 +681,13 @@ You can also change the order of the following log record keys via the `log_reco from aws_lambda_powertools import Logger # make message as the first key - logger = Logger(stream=stdout, log_record_order=["message"]) + logger = Logger(service="payment", log_record_order=["message"]) - # Default key sorting order - # Logger(stream=stdout, log_record_order=["level","location","message","timestamp"]) + # make request_id that will be added later as the first key + # Logger(service="payment", log_record_order=["request_id"]) + + # Default key sorting order when omit + # Logger(service="payment", log_record_order=["level","location","message","timestamp"]) ``` === "Example CloudWatch Logs excerpt" ```json hl_lines="3 5" @@ -656,26 +703,194 @@ You can also change the order of the following log record keys via the `log_reco #### Setting timestamp to UTC -By default, this Logger and standard logging library emits records using local time timestamp. 
You can override this behaviour by updating the current converter set in our formatter:
+By default, this Logger and standard logging library emits records using local time timestamp. You can override this behaviour via the `utc` parameter:
 
 === "app.py"
 
-    ```python hl_lines="1 3 9"
+    ```python hl_lines="6"
     from aws_lambda_powertools import Logger
-    import time
+    logger = Logger(service="payment")
+    logger.info("Local time")
 
-    logger = Logger(service="sample_service")
+    logger_in_utc = Logger(service="payment", utc=True)
+    logger_in_utc.info("GMT time zone")
+    ```
 
-    logger.info("Local time")
+#### Custom function for unserializable values
+
+By default, Logger uses `str` to handle values non-serializable by JSON. You can override this behaviour via the `json_default` parameter by passing a Callable:
+
+=== "collect.py"
+
+    ```python hl_lines="3-4 9 12"
+    from aws_lambda_powertools import Logger
+
+    def custom_json_default(value):
+        return f"<non-serializable: {type(value).__name__}>"
+
+    class Unserializable:
+        pass
+
+    logger = Logger(service="payment", json_default=custom_json_default)
+
+    def handler(event, context):
+        logger.info(Unserializable())
+    ```
+=== "Example CloudWatch Logs excerpt"
+    ```json hl_lines="4"
+    {
+        "level": "INFO",
+        "location": "collect.handler:8",
+        "message": "<non-serializable: Unserializable>",
+        "timestamp": "2021-05-03 15:17:23,632+0200",
+        "service": "payment"
+    }
+    ```
+
+#### Bring your own handler
-    logger._logger.handlers[0].formatter.converter = time.gmtime
+By default, Logger uses StreamHandler and logs to standard output. You can override this behaviour via the `logger_handler` parameter:
-    logger.info("GMT time")
 
 === "collect.py"
+
+    ```python hl_lines="7-8"
+    import logging
+    from pathlib import Path
+
+    from aws_lambda_powertools import Logger
+
+    log_file = Path("/tmp/log.json")
+    log_file_handler = logging.FileHandler(filename=log_file)
+    logger = Logger(service="payment", logger_handler=log_file_handler)
+
+    logger.info("Collecting payment")
+    ```
+
+#### Bring your own formatter
+
+By default, Logger uses [LambdaPowertoolsFormatter](#lambdapowertoolsformatter) that persists its custom structure between non-cold start invocations. There could be scenarios where the existing feature set isn't sufficient for your formatting needs.
+
+For **minor changes like remapping keys** after all log record processing has completed, you can override the `serialize` method from [LambdaPowertoolsFormatter](#lambdapowertoolsformatter):
+
+=== "custom_formatter.py"
+
+    ```python
+    from aws_lambda_powertools import Logger
+    from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
+
+    from typing import Dict
+
+    class CustomFormatter(LambdaPowertoolsFormatter):
+        def serialize(self, log: Dict) -> str:
+            """Serialize final structured log dict to JSON str"""
+            log["event"] = log.pop("message")  # rename message key to event
+            return self.json_serializer(log)  # use configured json serializer
+
+    my_formatter = CustomFormatter()
+    logger = Logger(service="example", logger_formatter=my_formatter)
+    logger.info("hello")
+    ```
+
+For **replacing the formatter entirely**, you can subclass `BasePowertoolsFormatter`, implement the `append_keys` method, and override the `format` standard logging method. This ensures the current Logger feature set like [injecting Lambda context](#capturing-lambda-context-info) and [sampling](#sampling-debug-logs) will continue to work.
+
+!!! info
+    You might need to implement the `remove_keys` method if you make use of that feature too.
+
+=== "collect.py"
+
+    ```python hl_lines="6 8 11 16 20 31"
+    import json
+    import logging
+    from typing import Iterable
+
+    from aws_lambda_powertools import Logger
+    from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter
+
+    class CustomFormatter(BasePowertoolsFormatter):
+        custom_format = {}  # arbitrary dict to hold our structured keys
+
+        def append_keys(self, **additional_keys):
+            # also used by `inject_lambda_context` decorator
+            self.custom_format.update(additional_keys)
+
+        # Optional unless you make use of this Logger feature
+        def remove_keys(self, keys: Iterable[str]):
+            for key in keys:
+                self.custom_format.pop(key, None)
+
+        def format(self, record: logging.LogRecord) -> str:  # noqa: A003
+            """Format logging record as structured JSON str"""
+            return json.dumps(
+                {
+                    "event": super().format(record),
+                    "timestamp": self.formatTime(record),
+                    "my_default_key": "test",
+                    **self.custom_format,
+                }
+            )
+
+    logger = Logger(service="payment", logger_formatter=CustomFormatter())
+
+    @logger.inject_lambda_context
+    def handler(event, context):
+        logger.info("Collecting payment")
+    ```
+=== "Example CloudWatch Logs excerpt"
+
+    ```json hl_lines="2-4"
+    {
+        "event": "Collecting payment",
+        "timestamp": "2021-05-03 11:47:12,494",
+        "my_default_key": "test",
+        "cold_start": true,
+        "lambda_function_name": "test",
+        "lambda_function_memory_size": 128,
+        "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test",
+        "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72"
+    }
+    ```
+
+#### Bring your own JSON serializer
+
+By default, Logger uses `json.dumps` and `json.loads` as serializer and deserializer respectively. There could be scenarios where you are making use of alternative JSON libraries like [orjson](https://github.com/ijl/orjson){target="_blank"}.
+
+As parameters don't always translate well between them, you can pass any callable that receives a `Dict` and returns a `str`:
+
+=== "collect.py"
+
+    ```python hl_lines="1 5-6 9-10"
+    import orjson
+
+    from aws_lambda_powertools import Logger
+
+    custom_serializer = orjson.dumps
+    custom_deserializer = orjson.loads
+
+    logger = Logger(service="payment",
+                    json_serializer=custom_serializer,
+                    json_deserializer=custom_deserializer
+    )
+
+    # when using parameters, you can pass a partial
+    # custom_serializer=functools.partial(orjson.dumps, option=orjson.OPT_SERIALIZE_NUMPY)
+    ```
+
+## Built-in Correlation ID expressions
+
+You can use any of the following built-in JMESPath expressions as part of [inject_lambda_context decorator](#setting-a-correlation-id).
+
+!!! note "Escaping necessary for the `-` character"
+    Any object key named with `-` must be escaped, for example **`request.headers."x-amzn-trace-id"`**.
+
+Name | Expression | Description
+------------------------------------------------- | ------------------------------------------------- | ---------------------------------------------------------------------------------
+**API_GATEWAY_REST** | `"requestContext.requestId"` | API Gateway REST API request ID
+**API_GATEWAY_HTTP** | `"requestContext.requestId"` | API Gateway HTTP API request ID
+**APPSYNC_RESOLVER** | `'request.headers."x-amzn-trace-id"'` | AppSync X-Ray Trace ID
+**APPLICATION_LOAD_BALANCER** | `'headers."x-amzn-trace-id"'` | ALB X-Ray Trace ID
+**EVENT_BRIDGE** | `"id"` | EventBridge Event ID
+
 ## Testing your code

+### Inject Lambda Context
+
 When unit testing your code that makes use of `inject_lambda_context` decorator, you need to pass a dummy Lambda Context, or else Logger will fail; Logger expects the context object's `function_name`, `memory_limit_in_mb`, `invoked_function_arn` and `aws_request_id` attributes.
This is a Pytest sample that provides the minimum information necessary for Logger to succeed:

@@ -727,6 +942,9 @@ This is a Pytest sample that provides the minimum information necessary for Logg
        your_lambda_handler(test_event, lambda_context)
    ```

+!!! tip
+    If you're using pytest and are looking to assert plain log messages, do check out the built-in [caplog fixture](https://docs.pytest.org/en/latest/how-to/logging.html){target="_blank"}.
+
 ### Pytest live log feature

 Pytest Live Log feature duplicates emitted log messages in order to style log statements according to their levels. For this to work, use the `POWERTOOLS_LOG_DEDUPLICATION_DISABLED` env var.

@@ -738,23 +956,6 @@ POWERTOOLS_LOG_DEDUPLICATION_DISABLED="1" pytest -o log_cli=1
 !!! warning
    This feature should be used with care, as it explicitly disables our ability to filter propagated messages to the root logger (if configured).

-## Built-in Correlation ID expressions
-
-> New in 1.12.0
-
-You can use any of the following built-in JMESPath expressions as part of [inject_lambda_context decorator](#setting-a-correlation-id).
-
-!!! note "Escaping necessary for the `-` character"
-    Any object key named with `-` must be escaped, for example **`request.headers."x-amzn-trace-id"`**.
-
-Name | Expression | Description
-------------------------------------------------- | ------------------------------------------------- | ---------------------------------------------------------------------------------
-**API_GATEWAY_REST** | `"requestContext.requestId"` | API Gateway REST API request ID
-**API_GATEWAY_HTTP** | `"requestContext.requestId"` | API Gateway HTTP API request ID
-**APPSYNC_RESOLVER** | `'request.headers."x-amzn-trace-id"'` | AppSync X-Ray Trace ID
-**APPLICATION_LOAD_BALANCER** | `'headers."x-amzn-trace-id"'` | ALB X-Ray Trace ID
-**EVENT_BRIDGE** | `"id"` | EventBridge Event ID
-
 ## FAQ

 **How can I enable boto3 and botocore library logging?**

@@ -783,34 +984,36 @@ for the given name and level to the logging module. By default, this logs all bo
        return response.get("Buckets", [])
    ```

-**What's the difference between `structure_log` and `extra`?**
+**What's the difference between `append_keys` and `extra`?**

-Keys added with `structure_log` will persist across multiple log messages while keys added via `extra` will only be available in a given log message operation.
+Keys added with `append_keys` will persist across multiple log messages while keys added via `extra` will only be available in a given log message operation.

Here's an example where we persist `payment_id` but not `booking_id`. Note that `payment_id` remains in both log messages while `booking_id` is only available in the first message.

=== "lambda_handler.py"

-    ```python hl_lines="4 8"
+    ```python hl_lines="6 10"
    from aws_lambda_powertools import Logger

    logger = Logger(service="payment")

-    logger.structure_logs(append=True, payment_id="123456789")
-    try:
-        booking_id = book_flight()
-        logger.info("Flight booked successfully", extra={ "booking_id": booking_id})
-    except BookingReservationError:
-        ...
+    def handler(event, context):
+        logger.append_keys(payment_id="123456789")
+
+        try:
+            booking_id = book_flight()
+            logger.info("Flight booked successfully", extra={ "booking_id": booking_id})
+        except BookingReservationError:
+            ...
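+        # payment_id (added via append_keys) persists into the next log message,
+        # while booking_id (passed via extra) appeared only in the message above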
-    logger.info("goodbye")
+        logger.info("goodbye")
    ```
=== "Example CloudWatch Logs excerpt"

    ```json hl_lines="8-9 18"
    {
        "level": "INFO",
-        "location": "<module>:5",
+        "location": "handler:10",
        "message": "Flight booked successfully",
        "timestamp": "2021-01-12 14:09:10,859",
        "service": "payment",
@@ -820,7 +1023,7 @@ Here's an example where we persist `payment_id` not `request_id`. Note that `pay
    },
    {
        "level": "INFO",
-        "location": "<module>:6",
+        "location": "handler:14",
        "message": "goodbye",
        "timestamp": "2021-01-12 14:09:10,860",
        "service": "payment",
diff --git a/docs/core/metrics.md b/docs/core/metrics.md
index 984ad760d28..b556dce2a9e 100644
--- a/docs/core/metrics.md
+++ b/docs/core/metrics.md
@@ -74,22 +74,28 @@ You can create metrics using `add_metric`, and you can create dimensions for all

=== "Metrics"

-    ```python hl_lines="5"
+    ```python hl_lines="8"
    from aws_lambda_powertools import Metrics
    from aws_lambda_powertools.metrics import MetricUnit

    metrics = Metrics(namespace="ExampleApplication", service="booking")
-    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+
+    @metrics.log_metrics
+    def lambda_handler(evt, ctx):
+        metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    ```
=== "Metrics with custom dimensions"

-    ```python hl_lines="5 6"
+    ```python hl_lines="8-9"
    from aws_lambda_powertools import Metrics
    from aws_lambda_powertools.metrics import MetricUnit

    metrics = Metrics(namespace="ExampleApplication", service="booking")
-    metrics.add_dimension(name="environment", value="prod")
-    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+
+    @metrics.log_metrics
+    def lambda_handler(evt, ctx):
+        metrics.add_dimension(name="environment", value="prod")
+        metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    ```

!!! tip "Autocomplete Metric Units"
@@ -98,6 +104,42 @@ You can create metrics using `add_metric`, and you can create dimensions for all
!!! note "Metrics overflow"
    CloudWatch EMF supports a max of 100 metrics per batch. Metrics utility will flush all metrics when adding the 100th metric. Subsequent metrics, e.g. the 101st, will be aggregated into a new EMF object, for your convenience.

+!!! warning "Do not create metrics or dimensions outside the handler"
+    Metrics or dimensions added in the global scope will only be added during cold start. Disregard if that's the intended behaviour.
+
+### Adding default dimensions
+
+You can use either the `set_default_dimensions` method or the `default_dimensions` parameter in the `log_metrics` decorator to persist dimensions across Lambda invocations.
+
+If you'd like to remove them at some point, you can use the `clear_default_dimensions` method, as in the short sketch below.
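+For instance, a minimal sketch of persisting dimensions and clearing them later (values mirror the examples below):
+
+```python
+from aws_lambda_powertools import Metrics
+from aws_lambda_powertools.metrics import MetricUnit
+
+metrics = Metrics(namespace="ExampleApplication", service="booking")
+metrics.set_default_dimensions(environment="prod")
+
+@metrics.log_metrics
+def lambda_handler(evt, ctx):
+    # carries the persisted "environment" dimension
+    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+
+# e.g. between test cases: stop persisting default dimensions
+metrics.clear_default_dimensions()
+```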
+ +=== "set_default_dimensions method" + + ```python hl_lines="5" + from aws_lambda_powertools import Metrics + from aws_lambda_powertools.metrics import MetricUnit + + metrics = Metrics(namespace="ExampleApplication", service="booking") + metrics.set_default_dimensions(environment="prod", another="one") + + @metrics.log_metrics + def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + ``` +=== "with log_metrics decorator" + + ```python hl_lines="5 7" + from aws_lambda_powertools import Metrics + from aws_lambda_powertools.metrics import MetricUnit + + metrics = Metrics(namespace="ExampleApplication", service="booking") + DEFAULT_DIMENSIONS = {"environment": "prod", "another": "one"} + + @metrics.log_metrics(default_dimensions=DEFAULT_DIMENSIONS) + def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + ``` + ### Flushing metrics As you finish adding all your metrics, you need to serialize and flush them to standard output. You can do that automatically with the `log_metrics` decorator. @@ -106,7 +148,7 @@ This decorator also **validates**, **serializes**, and **flushes** all your metr === "app.py" - ```python hl_lines="7" + ```python hl_lines="6" from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit @@ -115,7 +157,6 @@ This decorator also **validates**, **serializes**, and **flushes** all your metr @metrics.log_metrics def lambda_handler(evt, ctx): metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) - ... ``` === "Example CloudWatch Logs excerpt" @@ -158,7 +199,7 @@ If you want to ensure that at least one metric is emitted, you can pass `raise_o === "app.py" - ```python hl_lines="3" + ```python hl_lines="5" from aws_lambda_powertools.metrics import Metrics metrics = Metrics() @@ -177,20 +218,17 @@ When using multiple middlewares, use `log_metrics` as your **last decorator** wr === "nested_middlewares.py" - ```python hl_lines="9-10" + ```python hl_lines="7-8" from aws_lambda_powertools import Metrics, Tracer from aws_lambda_powertools.metrics import MetricUnit tracer = Tracer(service="booking") metrics = Metrics(namespace="ExampleApplication", service="booking") - metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) - @metrics.log_metrics @tracer.capture_lambda_handler def lambda_handler(evt, ctx): metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) - ... ``` ### Capturing cold start metric @@ -199,7 +237,7 @@ You can optionally capture cold start metrics with `log_metrics` decorator via ` === "app.py" - ```python hl_lines="6" + ```python hl_lines="5" from aws_lambda_powertools import Metrics metrics = Metrics(service="ExampleService") @@ -216,6 +254,8 @@ If it's a cold start invocation, this feature will: This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated dimensions. +!!! info "We do not emit 0 as a value for ColdStart metric for cost reasons. 
[Let us know](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=) if you'd prefer a flag to override it" + ## Advanced ### Adding metadata @@ -227,13 +267,16 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata === "app.py" - ```python hl_lines="6" + ```python hl_lines="9" from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit metrics = Metrics(namespace="ExampleApplication", service="booking") - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) - metrics.add_metadata(key="booking_id", value="booking_uuid") + + @metrics.log_metrics + def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + metrics.add_metadata(key="booking_id", value="booking_uuid") ``` === "Example CloudWatch Logs excerpt" @@ -276,13 +319,15 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use `single_met === "single_metric.py" - ```python hl_lines="4" + ```python hl_lines="6-7" from aws_lambda_powertools import single_metric from aws_lambda_powertools.metrics import MetricUnit - with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ExampleApplication") as metric: - metric.add_dimension(name="function_context", value="$LATEST") - ... + + def lambda_handler(evt, ctx): + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ExampleApplication") as metric: + metric.add_dimension(name="function_context", value="$LATEST") + ... ``` ### Flushing metrics manually @@ -294,17 +339,18 @@ If you prefer not to use `log_metrics` because you might want to encapsulate add === "manual_metric_serialization.py" - ```python hl_lines="8-10" + ```python hl_lines="9-11" import json from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit metrics = Metrics(namespace="ExampleApplication", service="booking") - metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) - your_metrics_object = metrics.serialize_metric_set() - metrics.clear_metrics() - print(json.dumps(your_metrics_object)) + def lambda_handler(evt, ctx): + metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) + your_metrics_object = metrics.serialize_metric_set() + metrics.clear_metrics() + print(json.dumps(your_metrics_object)) ``` ## Testing your code @@ -345,5 +391,78 @@ If you prefer setting environment variable for specific tests, and are using Pyt metrics = Metrics() metrics.clear_metrics() metrics_global.is_cold_start = True # ensure each test has cold start + metrics.clear_default_dimensions() # remove persisted default dimensions, if any yield ``` + +### Functional testing + +As metrics are logged to standard output, you can read standard output and assert whether metrics are present. 
Here's an example using `pytest` with the `capsys` built-in fixture:
+
+=== "Assert single EMF blob with pytest.py"
+
+    ```python hl_lines="6 9-10 23-34"
+    from aws_lambda_powertools import Metrics
+    from aws_lambda_powertools.metrics import MetricUnit
+
+    import json
+
+    def test_log_metrics(capsys):
+        # GIVEN Metrics is initialized
+        metrics = Metrics(namespace="ServerlessAirline")
+
+        # WHEN we utilize log_metrics to serialize
+        # and flush all metrics at the end of a function execution
+        @metrics.log_metrics
+        def lambda_handler(evt, ctx):
+            metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+            metrics.add_dimension(name="environment", value="prod")
+
+        lambda_handler({}, {})
+        log = capsys.readouterr().out.strip()  # remove any extra line
+        metrics_output = json.loads(log)  # deserialize JSON str
+
+        # THEN we should have no exceptions
+        # and a valid EMF object should be flushed correctly
+        assert "SuccessfulBooking" in log  # basic string assertion in JSON str
+        assert "SuccessfulBooking" in metrics_output["_aws"]["CloudWatchMetrics"][0]["Metrics"][0]["Name"]
+    ```
+
+=== "Assert multiple EMF blobs with pytest"
+
+    ```python hl_lines="8-9 11 21-23 25 29-30 32"
+    from aws_lambda_powertools import Metrics
+    from aws_lambda_powertools.metrics import MetricUnit
+
+    from collections import namedtuple
+
+    import json
+
+    def capture_metrics_output_multiple_emf_objects(capsys):
+        return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line]
+
+    def test_log_metrics(capsys):
+        # GIVEN Metrics is initialized
+        metrics = Metrics(namespace="ServerlessAirline")
+
+        # WHEN log_metrics is used with capture_cold_start_metric
+        @metrics.log_metrics(capture_cold_start_metric=True)
+        def lambda_handler(evt, ctx):
+            metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+            metrics.add_dimension(name="environment", value="prod")
+
+        # log_metrics uses function_name property from context to add as a dimension for cold start metric
+        LambdaContext = namedtuple("LambdaContext", "function_name")
+        lambda_handler({}, LambdaContext("example_fn"))
+
+        cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys)
+
+        # THEN ColdStart metric and function_name dimension should be logged
+        # in a separate EMF blob than the application metrics
+        assert cold_start_blob["ColdStart"] == [1.0]
+        assert cold_start_blob["function_name"] == "example_fn"
+
+        assert "SuccessfulBooking" in custom_metrics_blob  # as per previous example
+    ```
+
tip "For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/functional/test_metrics.py)" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 3dcb5da1e7c..fb99c6c702b 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -24,7 +24,7 @@ Before your use this utility, your AWS Lambda function [must have permissions](h > Example using AWS Serverless Application Model (SAM) === "template.yml" - ```yaml hl_lines="7 10" + ```yaml hl_lines="6 9" Resources: HelloWorldFunction: Type: AWS::Serverless::Function @@ -119,7 +119,8 @@ You can trace asynchronous functions and generator functions (including context === "Async" - ```python hl_lines="8" + + ```python hl_lines="7" import asyncio import contextlib from aws_lambda_powertools import Tracer diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index ecadbe530ae..bd9a8f8e98b 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -3,9 +3,6 @@ title: Idempotency description: Utility --- -!!! attention - **This utility is currently in beta**. Please open an [issue in GitHub](https://github.com/awslabs/aws-lambda-powertools-python/issues/new/choose) for any bugs or feature requests. - The idempotency utility provides a simple solution to convert your Lambda functions into idempotent operations which are safe to retry. diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 7c39b1ffd0a..83fca6b6741 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -161,6 +161,7 @@ Parser comes with the following built-in models: | **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams | | **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | | **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | +| **APIGatewayProxyEvent** | Lambda Event Source payload for Amazon API Gateway | ### extending built-in models @@ -271,13 +272,13 @@ Here's an example of parsing a model found in an event coming from EventBridge, } } - ret = parse(model=UserModel, envelope=envelopes.EventBridgeModel, event=payload) + ret = parse(model=UserModel, envelope=envelopes.EventBridgeEnvelope, event=payload) # Parsed model only contains our actual model, not the entire EventBridge + Payload parsed assert ret.password1 == ret.password2 # Same behaviour but using our decorator - @event_parser(model=UserModel, envelope=envelopes.EventBridgeModel) + @event_parser(model=UserModel, envelope=envelopes.EventBridgeEnvelope) def handler(event: UserModel, context: LambdaContext): assert event.password1 == event.password2 ``` @@ -285,26 +286,27 @@ Here's an example of parsing a model found in an event coming from EventBridge, **What's going on here, you might ask**: 1. We imported built-in `envelopes` from the parser utility -2. Used `envelopes.EventBridgeModel` as the envelope for our `UserModel` model +2. Used `envelopes.EventBridgeEnvelope` as the envelope for our `UserModel` model 3. Parser parsed the original event against the EventBridge model 4. Parser then parsed the `detail` key using `UserModel` -### built-in envelopes +### Built-in envelopes Parser comes with the following built-in envelopes, where `Model` in the return section is your given model. 
-| Envelope name | Behaviour | Return | -| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | -| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | -| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | -| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` | -| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` | -| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | -| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` | - -### bringing your own envelope +| Envelope name | Behaviour | Return | +| -------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | +| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | +| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | +| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` |
+| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and returns them in a list. | `List[Model]` |
+| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in `Records` key using your model and returns them in a list. | `List[Model]` |
+| **SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` |
+| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and returns them in a list. | `List[Model]` |
+| **ApiGatewayEnvelope** | 1. Parses data using `APIGatewayProxyEventModel`.
2. Parses `body` key using your model and returns it. | `Model` | + +### Bringing your own envelope You can create your own Envelope model and logic by inheriting from `BaseEnvelope`, and implementing the `parse` method. diff --git a/mkdocs.yml b/mkdocs.yml index 43a7e125696..b07e30386dd 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -14,6 +14,7 @@ nav: - core/metrics.md - Event Handler: - core/event_handler/appsync.md + - core/event_handler/api_gateway.md - Utilities: - utilities/middleware_factory.md - utilities/parameters.md diff --git a/poetry.lock b/poetry.lock index 2a92a013fc1..1b262e0b8f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -30,7 +30,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "aws-xray-sdk" -version = "2.6.0" +version = "2.8.0" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." category = "main" optional = false @@ -39,7 +39,6 @@ python-versions = "*" [package.dependencies] botocore = ">=1.11.3" future = "*" -jsonpickle = "*" wrapt = "*" [[package]] @@ -82,20 +81,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.44" +version = "1.17.60" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.20.44,<1.21.0" +botocore = ">=1.20.60,<1.21.0" jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.3.0,<0.4.0" +s3transfer = ">=0.4.0,<0.5.0" [[package]] name = "botocore" -version = "1.20.44" +version = "1.20.60" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -107,7 +106,23 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.10.8)"] +crt = ["awscrt (==0.11.11)"] + +[[package]] +name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "click" @@ -195,7 +210,7 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "flake8" -version = "3.9.0" +version = "3.9.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false @@ -350,28 +365,29 @@ smmap = ">=3.0.1,<5" [[package]] name = "gitpython" -version = "3.1.14" +version = "3.1.15" description = "Python Git Library" category = "dev" optional = false -python-versions = ">=3.4" +python-versions = ">=3.5" [package.dependencies] gitdb = ">=4.0.1,<5" +typing-extensions = ">=3.7.4.0" [[package]] name = "idna" -version = "3.1" +version = "2.10" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" -optional = true -python-versions = ">=3.4" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "3.10.0" +version = "4.0.1" description = "Read metadata from Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -434,22 +450,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[[package]] -name = "jsonpickle" -version = "2.0.0" -description = "Python library for serializing any arbitrary object graph into JSON" -category = "main" -optional = false -python-versions = ">=2.7" - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["coverage (<5)", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "sklearn", "sqlalchemy", "enum34", "jsonlib"] -"testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] - [[package]] name = "livereload" version = "2.6.3" @@ -587,7 +587,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.1.0" +version = "7.1.3" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -621,11 +621,11 @@ python-versions = "*" [[package]] name = "nltk" -version = "3.5" +version = "3.6.2" description = "Natural Language Toolkit" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5.*" [package.dependencies] click = "*" @@ -634,9 +634,9 @@ regex = "*" tqdm = "*" [package.extras] -all = ["requests", "numpy", "python-crfsuite", "scikit-learn", "twython", "pyparsing", "scipy", "matplotlib", "gensim"] +all = ["matplotlib", "twython", "scipy", "numpy", "gensim (<4.0.0)", "python-crfsuite", "pyparsing", "scikit-learn", "requests"] corenlp = ["requests"] -machine_learning = ["gensim", "numpy", "python-crfsuite", "scikit-learn", "scipy"] +machine_learning = ["gensim (<4.0.0)", "numpy", "python-crfsuite", "scikit-learn", "scipy"] plot = ["matplotlib"] tgrep = ["pyparsing"] twitter = ["twython"] @@ -662,7 +662,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pbr" -version = "5.5.1" +version = "5.6.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -814,11 +814,11 @@ testing = ["fields", "hunter", "process-tests 
(==2.0.2)", "six", "pytest-xdist", [[package]] name = "pytest-mock" -version = "3.5.1" +version = "3.6.0" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] pytest = ">=5.0" @@ -872,19 +872,25 @@ python-versions = "*" [[package]] name = "requests" -version = "2.15.1" +version = "2.25.1" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<5" +idna = ">=2.5,<3" +urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "ruamel.yaml" -version = "0.17.2" +version = "0.17.4" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "dev" optional = false @@ -907,7 +913,7 @@ python-versions = "*" [[package]] name = "s3transfer" -version = "0.3.6" +version = "0.4.2" description = "An Amazon S3 Transfer Manager" category = "main" optional = false @@ -916,6 +922,9 @@ python-versions = "*" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + [[package]] name = "six" version = "1.15.0" @@ -975,7 +984,7 @@ python-versions = ">= 3.5" [[package]] name = "tqdm" -version = "4.59.0" +version = "4.60.0" description = "Fast, Extensible Progress Meter" category = "dev" optional = false @@ -988,7 +997,7 @@ telegram = ["requests"] [[package]] name = "typed-ast" -version = "1.4.2" +version = "1.4.3" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -1040,7 +1049,7 @@ requests = ">=2.0,<3.0" name = "zipp" version = "3.4.1" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -1054,7 +1063,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "5b211499bc362d34629e46a0d4a66a4ef844fc1540cf66e3250284a9431bffe8" +content-hash = "8b792951aedc1d50c4ef81e3e7a17db0d3754ffb230d2bf370306e75fed4815f" [metadata.files] appdirs = [ @@ -1070,8 +1079,8 @@ attrs = [ {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] aws-xray-sdk = [ - {file = "aws-xray-sdk-2.6.0.tar.gz", hash = "sha256:abf5b90f740e1f402e23414c9670e59cb9772e235e271fef2bce62b9100cbc77"}, - {file = "aws_xray_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:076f7c610cd3564bbba3507d43e328fb6ff4a2e841d3590f39b2c3ce99d41e1d"}, + {file = "aws-xray-sdk-2.8.0.tar.gz", hash = "sha256:90c2fcc982a770e86d009a4c3d2b5c3e372da91cb8284d982bae458e2c0bb268"}, + {file = "aws_xray_sdk-2.8.0-py2.py3-none-any.whl", hash = "sha256:487e44a2e0b2a5b994f7db5fad3a8115f1ea238249117a119bce8ca2750661bd"}, ] bandit = [ {file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, @@ -1081,12 +1090,20 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = 
"boto3-1.17.44-py2.py3-none-any.whl", hash = "sha256:e74da1da74fbefbe2db7a9c53082018d862433f35e2ecd4c173632efc5742f40"}, - {file = "boto3-1.17.44.tar.gz", hash = "sha256:ffb9b192b2b52ab88cde09e2af7d9fd6e541287e5719098be97ffd7144f47eb1"}, + {file = "boto3-1.17.60-py2.py3-none-any.whl", hash = "sha256:ae6f4753cc8bc970a8e50c4a7115a26bb49dfb84a0355351fc495bf6aec07c6f"}, + {file = "boto3-1.17.60.tar.gz", hash = "sha256:edd2f14f64e0afd2373cd9b2c839004c751afedc593ea3baca3ab36f7206644d"}, ] botocore = [ - {file = "botocore-1.20.44-py2.py3-none-any.whl", hash = "sha256:8a7f85bf05ad62551b0e6dfeeec471147b330cb2b5c7f48795057e811e6a2e77"}, - {file = "botocore-1.20.44.tar.gz", hash = "sha256:2958e3912939558fd789a64b23a10039d8b0c0c84a23b573f3f2e3154de357ad"}, + {file = "botocore-1.20.60-py2.py3-none-any.whl", hash = "sha256:bb63a112ef415638328e4535b75dbc32ebd88f06b7937ce1d297aa5b5527335a"}, + {file = "botocore-1.20.60.tar.gz", hash = "sha256:e19947a8978e99467e7b1843308d1adc8febaa4e221021d4befdaec83d993ee7"}, +] +certifi = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -1170,8 +1187,8 @@ fastjsonschema = [ {file = "fastjsonschema-2.15.0.tar.gz", hash = "sha256:e1ecba260bcffb7de0dda6aee74261da1e6dccde5ee04c1170b2dd97d2b87676"}, ] flake8 = [ - {file = "flake8-3.9.0-py2.py3-none-any.whl", hash = "sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff"}, - {file = "flake8-3.9.0.tar.gz", hash = "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0"}, + {file = "flake8-3.9.1-py2.py3-none-any.whl", hash = "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"}, + {file = "flake8-3.9.1.tar.gz", hash = "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378"}, ] flake8-black = [ {file = "flake8-black-0.2.1.tar.gz", hash = "sha256:f26651bc10db786c03f4093414f7c9ea982ed8a244cec323c984feeffdf4c118"}, @@ -1219,16 +1236,16 @@ gitdb = [ {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ - {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, - {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"}, + {file = "GitPython-3.1.15-py3-none-any.whl", hash = "sha256:a77824e516d3298b04fb36ec7845e92747df8fcfee9cacc32dd6239f9652f867"}, + {file = "GitPython-3.1.15.tar.gz", hash = "sha256:05af150f47a5cca3f4b0af289b73aef8cf3c4fe2385015b06220cbcdee48bb6e"}, ] idna = [ - {file = "idna-3.1-py3-none-any.whl", hash = "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16"}, - {file = "idna-3.1.tar.gz", hash = "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"}, + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = 
"idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.10.0-py3-none-any.whl", hash = "sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe"}, - {file = "importlib_metadata-3.10.0.tar.gz", hash = "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a"}, + {file = "importlib_metadata-4.0.1-py3-none-any.whl", hash = "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"}, + {file = "importlib_metadata-4.0.1.tar.gz", hash = "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1250,10 +1267,6 @@ joblib = [ {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, ] -jsonpickle = [ - {file = "jsonpickle-2.0.0-py2.py3-none-any.whl", hash = "sha256:c1010994c1fbda87a48f8a56698605b598cb0fc6bb7e7927559fc1100e69aeac"}, - {file = "jsonpickle-2.0.0.tar.gz", hash = "sha256:0be49cba80ea6f87a168aa8168d717d00c6ca07ba83df3cec32d3b30bfe6fb9a"}, -] livereload = [ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] @@ -1344,8 +1357,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.0.tar.gz", hash = "sha256:1afaa5b174265eaa4a886f73187bb0e302a9596e9bfedb5aa2cb260d8b1d994e"}, - {file = "mkdocs_material-7.1.0-py2.py3-none-any.whl", hash = "sha256:13e73b3571d36f7e4a7dc11093323cff92095f4f219a00ba19c77a5e53aa6c55"}, + {file = "mkdocs-material-7.1.3.tar.gz", hash = "sha256:e34bba93ad1a0e6f9afc371f4ef55bedabbf13b9a786b013b0ce26ac55ec2932"}, + {file = "mkdocs_material-7.1.3-py2.py3-none-any.whl", hash = "sha256:437638b0de7a9113d7f1c9ddc93c0a29a3b808c71c3606713d8c1fa437697a3e"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, @@ -1356,7 +1369,8 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] nltk = [ - {file = "nltk-3.5.zip", hash = "sha256:845365449cd8c5f9731f7cb9f8bd6fd0767553b9d53af9eb1b3abf7700936b35"}, + {file = "nltk-3.6.2-py3-none-any.whl", hash = "sha256:240e23ab1ab159ef9940777d30c7c72d7e76d91877099218a7585370c11f6b9e"}, + {file = "nltk-3.6.2.zip", hash = "sha256:57d556abed621ab9be225cc6d2df1edce17572efb67a3d754630c9f8381503eb"}, ] packaging = [ {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, @@ -1367,8 +1381,8 @@ pathspec = [ {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, ] pbr = [ - {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, - {file = "pbr-5.5.1.tar.gz", hash = "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9"}, + {file = "pbr-5.6.0-py2.py3-none-any.whl", hash = 
"sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"}, + {file = "pbr-5.6.0.tar.gz", hash = "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"}, ] pdoc3 = [ {file = "pdoc3-0.9.2.tar.gz", hash = "sha256:9df5d931f25f353c69c46819a3bd03ef96dd286f2a70bb1b93a23a781f91faa1"}, @@ -1438,8 +1452,8 @@ pytest-cov = [ {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pytest-mock = [ - {file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"}, - {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, + {file = "pytest-mock-3.6.0.tar.gz", hash = "sha256:f7c3d42d6287f4e45846c8231c31902b6fa2bea98735af413a43da4cf5b727f1"}, + {file = "pytest_mock-3.6.0-py3-none-any.whl", hash = "sha256:952139a535b5b48ac0bb2f90b5dd36b67c7e1ba92601f3a8012678c4bd7f0bcc"}, ] python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, @@ -1524,12 +1538,12 @@ regex = [ {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ - {file = "requests-2.15.1-py2.py3-none-any.whl", hash = "sha256:ff753b2196cd18b1bbeddc9dcd5c864056599f7a7d9a4fb5677e723efa2b7fb9"}, - {file = "requests-2.15.1.tar.gz", hash = "sha256:e5659b9315a0610505e050bb7190bf6fa2ccee1ac295f2b760ef9d8a03ebbb2e"}, + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] "ruamel.yaml" = [ - {file = "ruamel.yaml-0.17.2-py3-none-any.whl", hash = "sha256:0850def9ebca23b3a8c64c4b4115ebb6b364a10d49f89d289a26ee965e1e7d9d"}, - {file = "ruamel.yaml-0.17.2.tar.gz", hash = "sha256:8f1e15421668b9edf30ed02899f5f81aff9808a4271935776f61a99a569a13da"}, + {file = "ruamel.yaml-0.17.4-py3-none-any.whl", hash = "sha256:ac79fb25f5476e8e9ed1c53b8a2286d2c3f5dde49eb37dbcee5c7eb6a8415a22"}, + {file = "ruamel.yaml-0.17.4.tar.gz", hash = "sha256:44bc6b54fddd45e4bc0619059196679f9e8b79c027f4131bb072e6a22f4d5e28"}, ] "ruamel.yaml.clib" = [ {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, @@ -1565,8 +1579,8 @@ requests = [ {file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"}, ] s3transfer = [ - {file = "s3transfer-0.3.6-py2.py3-none-any.whl", hash = "sha256:5d48b1fd2232141a9d5fb279709117aaba506cacea7f86f11bc392f06bfa8fc2"}, - {file = "s3transfer-0.3.6.tar.gz", hash = "sha256:c5dadf598762899d8cfaecf68eba649cd25b0ce93b6c954b156aaa3eed160547"}, + {file = "s3transfer-0.4.2-py2.py3-none-any.whl", hash = "sha256:9b3752887a2880690ce628bc263d6d13a3864083aeacff4890c1c9839a5eb0bc"}, + {file = "s3transfer-0.4.2.tar.gz", hash = "sha256:cb022f4b16551edebbb31a377d3f09600dbada7363d8c5db7976e7f47732e1b2"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, @@ -1632,40 +1646,40 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] tqdm = [ - {file = "tqdm-4.59.0-py2.py3-none-any.whl", hash 
= "sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7"}, - {file = "tqdm-4.59.0.tar.gz", hash = "sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33"}, + {file = "tqdm-4.60.0-py2.py3-none-any.whl", hash = "sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3"}, + {file = "tqdm-4.60.0.tar.gz", hash = "sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae"}, ] typed-ast = [ - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, - {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, - {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, - {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, - {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = 
"sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, - {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, - {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, - {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, - {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, - {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, diff --git a/pyproject.toml b/pyproject.toml index 3c16f373756..d35e58e5664 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.14.0" +version = "1.15.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed"] @@ -20,7 +20,7 @@ license = "MIT-0" [tool.poetry.dependencies] python = "^3.6.1" -aws-xray-sdk = "2.6.0" +aws-xray-sdk = "^2.8.0" fastjsonschema = "^2.14.5" boto3 = "^1.12" jmespath = "^0.10.0" diff --git a/tests/events/apiGatewayProxyEvent.json b/tests/events/apiGatewayProxyEvent.json index 1fed04a25bf..8bc72b7ce78 100644 --- a/tests/events/apiGatewayProxyEvent.json +++ b/tests/events/apiGatewayProxyEvent.json @@ -49,10 +49,20 @@ "cognitoIdentityId": null, "cognitoIdentityPoolId": null, "principalOrgId": null, - "sourceIp": "IP", + "sourceIp": "192.168.0.1/32", "user": null, "userAgent": "user-agent", - "userArn": null + "userArn": null, + "clientCert": { + "clientCertPem": "CERT_CONTENT", + "subjectDN": "www.example.com", + "issuerDN": "Example issuer", + "serialNumber": "a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1", + "validity": { + "notBefore": "May 28 12:30:02 2019 GMT", + "notAfter": "Aug 5 09:36:04 2021 GMT" + } + } }, "path": "/my/path", "protocol": "HTTP/1.1", @@ -67,4 +77,4 @@ "stageVariables": null, "body": "Hello from Lambda!", 
"isBase64Encoded": true -} +} \ No newline at end of file diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py new file mode 100644 index 00000000000..caaaeb1b97b --- /dev/null +++ b/tests/functional/event_handler/test_api_gateway.py @@ -0,0 +1,492 @@ +import base64 +import json +import zlib +from decimal import Decimal +from pathlib import Path +from typing import Dict + +from aws_lambda_powertools.event_handler.api_gateway import ( + ApiGatewayResolver, + CORSConfig, + ProxyEventType, + Response, + ResponseBuilder, +) +from aws_lambda_powertools.shared.json_encoder import Encoder +from aws_lambda_powertools.utilities.data_classes import ALBEvent, APIGatewayProxyEvent, APIGatewayProxyEventV2 +from tests.functional.utils import load_event + + +def read_media(file_name: str) -> bytes: + path = Path(str(Path(__file__).parent.parent.parent.parent) + "/docs/media/" + file_name) + return path.read_bytes() + + +LOAD_GW_EVENT = load_event("apiGatewayProxyEvent.json") +TEXT_HTML = "text/html" +APPLICATION_JSON = "application/json" + + +def test_alb_event(): + # GIVEN a Application Load Balancer proxy type event + app = ApiGatewayResolver(proxy_type=ProxyEventType.ALBEvent) + + @app.get("/lambda") + def foo(): + assert isinstance(app.current_event, ALBEvent) + assert app.lambda_context == {} + return Response(200, TEXT_HTML, "foo") + + # WHEN calling the event handler + result = app(load_event("albEvent.json"), {}) + + # THEN process event correctly + # AND set the current_event type as ALBEvent + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["body"] == "foo" + + +def test_api_gateway_v1(): + # GIVEN a Http API V1 proxy type event + app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent) + + @app.get("/my/path") + def get_lambda() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEvent) + assert app.lambda_context == {} + return Response(200, APPLICATION_JSON, json.dumps({"foo": "value"})) + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT, {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEvent + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == APPLICATION_JSON + + +def test_api_gateway(): + # GIVEN a Rest API Gateway proxy type event + app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent) + + @app.get("/my/path") + def get_lambda() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEvent) + return Response(200, TEXT_HTML, "foo") + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT, {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEvent + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["body"] == "foo" + + +def test_api_gateway_v2(): + # GIVEN a Http API V2 proxy type event + app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEventV2) + + @app.post("/my/path") + def my_path() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEventV2) + post_data = app.current_event.json_body + return Response(200, "plain/text", post_data["username"]) + + # WHEN calling the event handler + result = app(load_event("apiGatewayProxyV2Event.json"), {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEventV2 + assert result["statusCode"] == 200 + assert 
result["headers"]["Content-Type"] == "plain/text" + assert result["body"] == "tom" + + +def test_include_rule_matching(): + # GIVEN + app = ApiGatewayResolver() + + @app.get("//") + def get_lambda(my_id: str, name: str) -> Response: + assert name == "my" + return Response(200, TEXT_HTML, my_id) + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT, {}) + + # THEN + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["body"] == "path" + + +def test_no_matches(): + # GIVEN an event that does not match any of the given routes + app = ApiGatewayResolver() + + @app.get("/not_matching_get") + def get_func(): + raise RuntimeError() + + @app.post("/no_matching_post") + def post_func(): + raise RuntimeError() + + @app.put("/no_matching_put") + def put_func(): + raise RuntimeError() + + @app.delete("/no_matching_delete") + def delete_func(): + raise RuntimeError() + + @app.patch("/no_matching_patch") + def patch_func(): + raise RuntimeError() + + def handler(event, context): + return app.resolve(event, context) + + # Also check check the route configurations + routes = app._routes + assert len(routes) == 5 + for route in routes: + if route.func == get_func: + assert route.method == "GET" + elif route.func == post_func: + assert route.method == "POST" + elif route.func == put_func: + assert route.method == "PUT" + elif route.func == delete_func: + assert route.method == "DELETE" + elif route.func == patch_func: + assert route.method == "PATCH" + + # WHEN calling the handler + # THEN return a 404 + result = handler(LOAD_GW_EVENT, None) + assert result["statusCode"] == 404 + # AND cors headers are not returned + assert "Access-Control-Allow-Origin" not in result["headers"] + + +def test_cors(): + # GIVEN a function with cors=True + # AND http method set to GET + app = ApiGatewayResolver() + + @app.get("/my/path", cors=True) + def with_cors() -> Response: + return Response(200, TEXT_HTML, "test") + + @app.get("/without-cors") + def without_cors() -> Response: + return Response(200, TEXT_HTML, "test") + + def handler(event, context): + return app.resolve(event, context) + + # WHEN calling the event handler + result = handler(LOAD_GW_EVENT, None) + + # THEN the headers should include cors headers + assert "headers" in result + headers = result["headers"] + assert headers["Content-Type"] == TEXT_HTML + assert headers["Access-Control-Allow-Origin"] == "*" + assert "Access-Control-Allow-Credentials" not in headers + assert headers["Access-Control-Allow-Headers"] == ",".join(sorted(CORSConfig._REQUIRED_HEADERS)) + + # THEN for routes without cors flag return no cors headers + mock_event = {"path": "/my/request", "httpMethod": "GET"} + result = handler(mock_event, None) + assert "Access-Control-Allow-Origin" not in result["headers"] + + +def test_compress(): + # GIVEN a function that has compress=True + # AND an event with a "Accept-Encoding" that include gzip + app = ApiGatewayResolver() + mock_event = {"path": "/my/request", "httpMethod": "GET", "headers": {"Accept-Encoding": "deflate, gzip"}} + expected_value = '{"test": "value"}' + + @app.get("/my/request", compress=True) + def with_compression() -> Response: + return Response(200, APPLICATION_JSON, expected_value) + + def handler(event, context): + return app.resolve(event, context) + + # WHEN calling the event handler + result = handler(mock_event, None) + + # THEN then gzip the response and base64 encode as a string + assert result["isBase64Encoded"] is True + body = result["body"] + 
+
+
+def test_compress():
+    # GIVEN a function that has compress=True
+    # AND an event with an "Accept-Encoding" header that includes gzip
+    app = ApiGatewayResolver()
+    mock_event = {"path": "/my/request", "httpMethod": "GET", "headers": {"Accept-Encoding": "deflate, gzip"}}
+    expected_value = '{"test": "value"}'
+
+    @app.get("/my/request", compress=True)
+    def with_compression() -> Response:
+        return Response(200, APPLICATION_JSON, expected_value)
+
+    def handler(event, context):
+        return app.resolve(event, context)
+
+    # WHEN calling the event handler
+    result = handler(mock_event, None)
+
+    # THEN gzip the response and base64 encode it as a string
+    assert result["isBase64Encoded"] is True
+    body = result["body"]
+    assert isinstance(body, str)
+    decompress = zlib.decompress(base64.b64decode(body), wbits=zlib.MAX_WBITS | 16).decode("UTF-8")
+    assert decompress == expected_value
+    headers = result["headers"]
+    assert headers["Content-Encoding"] == "gzip"
+
+
+def test_base64_encode():
+    # GIVEN a function that returns bytes
+    app = ApiGatewayResolver()
+    mock_event = {"path": "/my/path", "httpMethod": "GET", "headers": {"Accept-Encoding": "deflate, gzip"}}
+
+    @app.get("/my/path", compress=True)
+    def read_image() -> Response:
+        return Response(200, "image/png", read_media("idempotent_sequence_exception.png"))
+
+    # WHEN calling the event handler
+    result = app(mock_event, None)
+
+    # THEN return the body as a base64 encoded string
+    assert result["isBase64Encoded"] is True
+    body = result["body"]
+    assert isinstance(body, str)
+    headers = result["headers"]
+    assert headers["Content-Encoding"] == "gzip"
+
+
+def test_compress_no_accept_encoding():
+    # GIVEN a function with compress=True
+    # AND the request has no "Accept-Encoding" header that includes gzip
+    app = ApiGatewayResolver()
+    expected_value = "Foo"
+
+    @app.get("/my/path", compress=True)
+    def return_text() -> Response:
+        return Response(200, "text/plain", expected_value)
+
+    # WHEN calling the event handler
+    result = app({"path": "/my/path", "httpMethod": "GET", "headers": {}}, None)
+
+    # THEN don't perform any gzip compression
+    assert result["isBase64Encoded"] is False
+    assert result["body"] == expected_value
+
+
+def test_cache_control_200():
+    # GIVEN a function with cache_control set
+    app = ApiGatewayResolver()
+
+    @app.get("/success", cache_control="max-age=600")
+    def with_cache_control() -> Response:
+        return Response(200, TEXT_HTML, "has 200 response")
+
+    def handler(event, context):
+        return app.resolve(event, context)
+
+    # WHEN calling the event handler
+    # AND the function returns a 200 status code
+    result = handler({"path": "/success", "httpMethod": "GET"}, None)
+
+    # THEN return the set Cache-Control
+    headers = result["headers"]
+    assert headers["Content-Type"] == TEXT_HTML
+    assert headers["Cache-Control"] == "max-age=600"
+
+
+def test_cache_control_non_200():
+    # GIVEN a function with cache_control set
+    app = ApiGatewayResolver()
+
+    @app.delete("/fails", cache_control="max-age=600")
+    def with_cache_control_has_503() -> Response:
+        return Response(503, TEXT_HTML, "has 503 response")
+
+    def handler(event, context):
+        return app.resolve(event, context)
+
+    # WHEN calling the event handler
+    # AND the function returns a 503 status code
+    result = handler({"path": "/fails", "httpMethod": "DELETE"}, None)
+
+    # THEN return a Cache-Control of "no-cache"
+    headers = result["headers"]
+    assert headers["Content-Type"] == TEXT_HTML
+    assert headers["Cache-Control"] == "no-cache"
+
+
+def test_rest_api():
+    # GIVEN a function that returns a Dict
+    app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent)
+    expected_dict = {"foo": "value", "second": Decimal("100.01")}
+
+    @app.get("/my/path")
+    def rest_func() -> Dict:
+        return expected_dict
+
+    # WHEN calling the event handler
+    result = app(LOAD_GW_EVENT, {})
+
+    # THEN automatically process this as a JSON REST API response
+    assert result["statusCode"] == 200
+    assert result["headers"]["Content-Type"] == APPLICATION_JSON
+    expected_str = json.dumps(expected_dict, separators=(",", ":"), indent=None, cls=Encoder)
+    assert result["body"] == expected_str
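# Sketch of the serialization contract test_rest_api asserts above: dict return
# values are dumped with the shared Encoder so Decimal values, which the stdlib
# json encoder rejects, can be serialized. Payload values are illustrative.
import json
from decimal import Decimal
from aws_lambda_powertools.shared.json_encoder import Encoder

payload = {"foo": "value", "second": Decimal("100.01")}
body = json.dumps(payload, separators=(",", ":"), indent=None, cls=Encoder)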
+
+
+def test_handling_response_type():
+    # GIVEN a function that returns Response
+    app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent)
+
+    @app.get("/my/path")
+    def rest_func() -> Response:
+        return Response(
+            status_code=404,
+            content_type="used-if-not-set-in-header",
+            body="Not found",
+            headers={"Content-Type": "header-content-type-wins", "custom": "value"},
+        )
+
+    # WHEN calling the event handler
+    result = app(LOAD_GW_EVENT, {})
+
+    # THEN the result can include additional controls, such as overriding http headers
+    assert result["statusCode"] == 404
+    assert result["headers"]["Content-Type"] == "header-content-type-wins"
+    assert result["headers"]["custom"] == "value"
+    assert result["body"] == "Not found"
+
+
+def test_custom_cors_config():
+    # GIVEN a custom cors configuration
+    allow_header = ["foo2"]
+    cors_config = CORSConfig(
+        allow_origin="https://foo1",
+        expose_headers=["foo1"],
+        allow_headers=allow_header,
+        max_age=100,
+        allow_credentials=True,
+    )
+    app = ApiGatewayResolver(cors=cors_config)
+    event = {"path": "/cors", "httpMethod": "GET"}
+
+    @app.get("/cors")
+    def get_with_cors():
+        return {}
+
+    @app.get("/another-one", cors=False)
+    def another_one():
+        return {}
+
+    # WHEN calling the event handler
+    result = app(event, None)
+
+    # THEN routes by default return the custom cors headers
+    assert "headers" in result
+    headers = result["headers"]
+    assert headers["Content-Type"] == APPLICATION_JSON
+    assert headers["Access-Control-Allow-Origin"] == cors_config.allow_origin
+    expected_allows_headers = ",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS)))
+    assert headers["Access-Control-Allow-Headers"] == expected_allows_headers
+    assert headers["Access-Control-Expose-Headers"] == ",".join(cors_config.expose_headers)
+    assert headers["Access-Control-Max-Age"] == str(cors_config.max_age)
+    assert "Access-Control-Allow-Credentials" in headers
+    assert headers["Access-Control-Allow-Credentials"] == "true"
+
+    # AND custom cors was set on the app
+    assert isinstance(app._cors, CORSConfig)
+    assert app._cors is cors_config
+
+    # AND routes without cors don't include "Access-Control" headers
+    event = {"path": "/another-one", "httpMethod": "GET"}
+    result = app(event, None)
+    headers = result["headers"]
+    assert "Access-Control-Allow-Origin" not in headers
+
+
+def test_no_content_response():
+    # GIVEN a response with no content-type or body
+    response = Response(status_code=204, content_type=None, body=None, headers=None)
+    response_builder = ResponseBuilder(response)
+
+    # WHEN calling build
+    result = response_builder.build(APIGatewayProxyEvent(LOAD_GW_EVENT))
+
+    # THEN return a None body and no Content-Type header
+    assert result["statusCode"] == response.status_code
+    assert result["body"] is None
+    headers = result["headers"]
+    assert "Content-Type" not in headers
+
+
+def test_no_matches_with_cors():
+    # GIVEN an event that does not match any of the given routes
+    # AND cors enabled
+    app = ApiGatewayResolver(cors=CORSConfig())
+
+    # WHEN calling the handler
+    result = app({"path": "/another-one", "httpMethod": "GET"}, None)
+
+    # THEN return a 404
+    # AND cors headers are returned
+    assert result["statusCode"] == 404
+    assert "Access-Control-Allow-Origin" in result["headers"]
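# Sketch of the builder path test_no_content_response exercises above:
# ResponseBuilder turns a Response plus the inbound event into the dict shape
# the Lambda proxy integration expects. The 204 response here is illustrative.
from aws_lambda_powertools.event_handler.api_gateway import Response, ResponseBuilder
from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent

def no_content(event: dict) -> dict:
    response = Response(status_code=204, content_type=None, body=None)
    return ResponseBuilder(response).build(APIGatewayProxyEvent(event))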
+
+
+def test_cors_preflight():
+    # GIVEN an event for an OPTIONS call that does not match any of the given routes
+    # AND cors is enabled
+    app = ApiGatewayResolver(cors=CORSConfig())
+
+    @app.get("/foo")
+    def foo_cors():
+        ...
+
+    @app.route(method="delete", rule="/foo")
+    def foo_delete_cors():
+        ...
+
+    @app.post("/foo", cors=False)
+    def post_no_cors():
+        ...
+
+    # WHEN calling the handler
+    result = app({"path": "/foo", "httpMethod": "OPTIONS"}, None)
+
+    # THEN return no content
+    # AND include an Access-Control-Allow-Methods header with the cors methods in use
+    assert result["statusCode"] == 204
+    assert result["body"] is None
+    headers = result["headers"]
+    assert "Content-Type" not in headers
+    assert "Access-Control-Allow-Origin" in result["headers"]
+    assert headers["Access-Control-Allow-Methods"] == "DELETE,GET,OPTIONS"
+
+
+def test_custom_preflight_response():
+    # GIVEN cors is enabled
+    # AND we have a custom preflight method
+    # AND the request matches this custom preflight route
+    app = ApiGatewayResolver(cors=CORSConfig())
+
+    @app.route(method="OPTIONS", rule="/some-call", cors=True)
+    def custom_preflight():
+        return Response(
+            status_code=200,
+            content_type=TEXT_HTML,
+            body="Foo",
+            headers={"Access-Control-Allow-Methods": "CUSTOM"},
+        )
+
+    @app.route(method="CUSTOM", rule="/some-call", cors=True)
+    def custom_method():
+        ...
+
+    # WHEN calling the handler
+    result = app({"path": "/some-call", "httpMethod": "OPTIONS"}, None)
+
+    # THEN return the custom preflight response
+    assert result["statusCode"] == 200
+    assert result["body"] == "Foo"
+    headers = result["headers"]
+    assert headers["Content-Type"] == TEXT_HTML
+    assert "Access-Control-Allow-Origin" in result["headers"]
+    assert headers["Access-Control-Allow-Methods"] == "CUSTOM"
diff --git a/tests/functional/event_handler/test_appsync.py b/tests/functional/event_handler/test_appsync.py
index c72331c32f1..e260fef89ab 100644
--- a/tests/functional/event_handler/test_appsync.py
+++ b/tests/functional/event_handler/test_appsync.py
@@ -1,18 +1,12 @@
 import asyncio
-import json
 import sys
-from pathlib import Path

 import pytest

 from aws_lambda_powertools.event_handler import AppSyncResolver
 from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent
 from aws_lambda_powertools.utilities.typing import LambdaContext
-
-
-def load_event(file_name: str) -> dict:
-    path = Path(str(Path(__file__).parent.parent.parent) + "/events/" + file_name)
-    return json.loads(path.read_text())
+from tests.functional.utils import load_event


 def test_direct_resolver():
diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py
index d34d5da7d12..e100957dee7 100644
--- a/tests/functional/idempotency/conftest.py
+++ b/tests/functional/idempotency/conftest.py
@@ -1,7 +1,6 @@
 import datetime
 import hashlib
 import json
-import os
 from collections import namedtuple
 from decimal import Decimal
 from unittest import mock
@@ -17,6 +16,7 @@
 from aws_lambda_powertools.utilities.idempotency.idempotency import IdempotencyConfig
 from aws_lambda_powertools.utilities.validation import envelopes
 from aws_lambda_powertools.utilities.validation.base import unwrap_event_from_envelope
+from tests.functional.utils import load_event

 TABLE_NAME = "TEST_TABLE"

@@ -28,11 +28,7 @@ def config() -> Config:

 @pytest.fixture(scope="module")
 def lambda_apigw_event():
-    full_file_name = os.path.dirname(os.path.realpath(__file__)) + "/../../events/" + "apiGatewayProxyV2Event.json"
-    with open(full_file_name) as fp:
-        event = json.load(fp)
-
-    return event
+    return load_event("apiGatewayProxyV2Event.json")


 @pytest.fixture
diff --git a/tests/functional/parser/schemas.py b/tests/functional/parser/schemas.py
index a944b4f09c0..8ff56f703a7 100644
--- a/tests/functional/parser/schemas.py
+++ b/tests/functional/parser/schemas.py
@@ -81,3 +81,8 @@ class MyKinesisBusiness(BaseModel):
 class MyCloudWatchBusiness(BaseModel):
     my_message: str
     user: str
+
+
+class MyApiGatewayBusiness(BaseModel):
+    message: str
+    username: str
diff --git a/tests/functional/parser/test_alb.py b/tests/functional/parser/test_alb.py
index 88631c7194c..d48e39f1bab 100644
--- a/tests/functional/parser/test_alb.py
+++ b/tests/functional/parser/test_alb.py
@@ -3,7 +3,7 @@
 from aws_lambda_powertools.utilities.parser import ValidationError, event_parser
 from aws_lambda_powertools.utilities.parser.models import AlbModel
 from aws_lambda_powertools.utilities.typing import LambdaContext
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=AlbModel)
diff --git a/tests/functional/parser/test_apigw.py b/tests/functional/parser/test_apigw.py
new file mode 100644
index 00000000000..fc679d5dc37
--- /dev/null
+++ b/tests/functional/parser/test_apigw.py
@@ -0,0 +1,102 @@
+from aws_lambda_powertools.utilities.parser import envelopes, event_parser
+from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel
+from aws_lambda_powertools.utilities.typing import LambdaContext
+from tests.functional.parser.schemas import MyApiGatewayBusiness
+from tests.functional.utils import load_event
+
+
+@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayEnvelope)
+def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext):
+    assert event.message == "Hello"
+    assert event.username == "Ran"
+
+
+@event_parser(model=APIGatewayProxyEventModel)
+def handle_apigw_event(event: APIGatewayProxyEventModel, _: LambdaContext):
+    assert event.body == "Hello from Lambda!"
+    return event
+
+
+def test_apigw_event_with_envelope():
+    event = load_event("apiGatewayProxyEvent.json")
+    event["body"] = '{"message": "Hello", "username": "Ran"}'
+    handle_apigw_with_envelope(event, LambdaContext())
event["requestContext"]["identity"]["cognitoAuthenticationProvider"] + ) + assert identity.cognitoAuthenticationType == event["requestContext"]["identity"]["cognitoAuthenticationType"] + assert identity.cognitoIdentityId == event["requestContext"]["identity"]["cognitoIdentityId"] + assert identity.cognitoIdentityPoolId == event["requestContext"]["identity"]["cognitoIdentityPoolId"] + assert identity.principalOrgId == event["requestContext"]["identity"]["principalOrgId"] + assert str(identity.sourceIp) == event["requestContext"]["identity"]["sourceIp"] + assert identity.user == event["requestContext"]["identity"]["user"] + assert identity.userAgent == event["requestContext"]["identity"]["userAgent"] + assert identity.userArn == event["requestContext"]["identity"]["userArn"] + assert identity.clientCert is not None + assert identity.clientCert.clientCertPem == event["requestContext"]["identity"]["clientCert"]["clientCertPem"] + assert identity.clientCert.subjectDN == event["requestContext"]["identity"]["clientCert"]["subjectDN"] + assert identity.clientCert.issuerDN == event["requestContext"]["identity"]["clientCert"]["issuerDN"] + assert identity.clientCert.serialNumber == event["requestContext"]["identity"]["clientCert"]["serialNumber"] + assert ( + identity.clientCert.validity.notBefore + == event["requestContext"]["identity"]["clientCert"]["validity"]["notBefore"] + ) + assert ( + identity.clientCert.validity.notAfter + == event["requestContext"]["identity"]["clientCert"]["validity"]["notAfter"] + ) + + assert request_context.path == event["requestContext"]["path"] + assert request_context.protocol == event["requestContext"]["protocol"] + assert request_context.requestId == event["requestContext"]["requestId"] + assert request_context.requestTime == event["requestContext"]["requestTime"] + convert_time = int(round(request_context.requestTimeEpoch.timestamp() * 1000)) + assert convert_time == 1583349317135 + assert request_context.resourceId == event["requestContext"]["resourceId"] + assert request_context.resourcePath == event["requestContext"]["resourcePath"] + assert request_context.stage == event["requestContext"]["stage"] + + assert parsed_event.pathParameters == event["pathParameters"] + assert parsed_event.stageVariables == event["stageVariables"] + assert parsed_event.body == event["body"] + assert parsed_event.isBase64Encoded == event["isBase64Encoded"] + + assert request_context.connectedAt is None + assert request_context.connectionId is None + assert request_context.eventType is None + assert request_context.messageDirection is None + assert request_context.messageId is None + assert request_context.routeKey is None + assert request_context.operationName is None + assert identity.apiKey is None + assert identity.apiKeyId is None diff --git a/tests/functional/parser/test_cloudwatch.py b/tests/functional/parser/test_cloudwatch.py index 9a61f339140..7290d0bffcb 100644 --- a/tests/functional/parser/test_cloudwatch.py +++ b/tests/functional/parser/test_cloudwatch.py @@ -9,7 +9,7 @@ from aws_lambda_powertools.utilities.parser.models import CloudWatchLogsLogEvent, CloudWatchLogsModel from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.schemas import MyCloudWatchBusiness -from tests.functional.parser.utils import load_event +from tests.functional.utils import load_event @event_parser(model=MyCloudWatchBusiness, envelope=envelopes.CloudWatchLogsEnvelope) diff --git a/tests/functional/parser/test_dynamodb.py 
diff --git a/tests/functional/parser/test_dynamodb.py b/tests/functional/parser/test_dynamodb.py
index bd7e0795f42..9917fac234b 100644
--- a/tests/functional/parser/test_dynamodb.py
+++ b/tests/functional/parser/test_dynamodb.py
@@ -5,7 +5,7 @@
 from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope)
diff --git a/tests/functional/parser/test_eventbridge.py b/tests/functional/parser/test_eventbridge.py
index 7a3066d7b04..6242403ab35 100644
--- a/tests/functional/parser/test_eventbridge.py
+++ b/tests/functional/parser/test_eventbridge.py
@@ -5,7 +5,7 @@
 from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.parser.schemas import MyAdvancedEventbridgeBusiness, MyEventbridgeBusiness
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope)
diff --git a/tests/functional/parser/test_kinesis.py b/tests/functional/parser/test_kinesis.py
index 5a7a94e0dac..632a7463805 100644
--- a/tests/functional/parser/test_kinesis.py
+++ b/tests/functional/parser/test_kinesis.py
@@ -6,7 +6,7 @@
 from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel, KinesisDataStreamRecordPayload
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.parser.schemas import MyKinesisBusiness
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope)
diff --git a/tests/functional/parser/test_s3object_event.py b/tests/functional/parser/test_s3object_event.py
index da015338cf4..90c2555360d 100644
--- a/tests/functional/parser/test_s3object_event.py
+++ b/tests/functional/parser/test_s3object_event.py
@@ -1,7 +1,7 @@
 from aws_lambda_powertools.utilities.parser import event_parser
 from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent
 from aws_lambda_powertools.utilities.typing import LambdaContext
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=S3ObjectLambdaEvent)
diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py
index a9c325f3a97..71a5dc6afe3 100644
--- a/tests/functional/parser/test_s3.py
+++ b/tests/functional/parser/test_s3.py
@@ -1,7 +1,7 @@
 from aws_lambda_powertools.utilities.parser import event_parser, parse
 from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
 from aws_lambda_powertools.utilities.typing import LambdaContext
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=S3Model)
diff --git a/tests/functional/parser/test_ses.py b/tests/functional/parser/test_ses.py
index f96da7bad66..d434e2350f8 100644
--- a/tests/functional/parser/test_ses.py
+++ b/tests/functional/parser/test_ses.py
@@ -1,7 +1,7 @@
 from aws_lambda_powertools.utilities.parser import event_parser
 from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel
 from aws_lambda_powertools.utilities.typing import LambdaContext
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event


 @event_parser(model=SesModel)
diff --git a/tests/functional/parser/test_sns.py b/tests/functional/parser/test_sns.py
index 015af3693fa..81158a4419e 100644
--- a/tests/functional/parser/test_sns.py
+++ b/tests/functional/parser/test_sns.py
@@ -5,7 +5,7 @@
 from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event
 from tests.functional.validator.conftest import sns_event  # noqa: F401
diff --git a/tests/functional/parser/test_sqs.py b/tests/functional/parser/test_sqs.py
index 0cea8246b50..7ca883616f2 100644
--- a/tests/functional/parser/test_sqs.py
+++ b/tests/functional/parser/test_sqs.py
@@ -5,7 +5,7 @@
 from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, event_parser
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness
-from tests.functional.parser.utils import load_event
+from tests.functional.utils import load_event
 from tests.functional.validator.conftest import sqs_event  # noqa: F401
diff --git a/tests/functional/parser/utils.py b/tests/functional/parser/utils.py
deleted file mode 100644
index 7cb949b1289..00000000000
--- a/tests/functional/parser/utils.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import json
-import os
-from typing import Any
-
-
-def get_event_file_path(file_name: str) -> str:
-    return os.path.dirname(os.path.realpath(__file__)) + "/../../events/" + file_name
-
-
-def load_event(file_name: str) -> Any:
-    full_file_name = get_event_file_path(file_name)
-    with open(full_file_name) as fp:
-        return json.load(fp)
diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py
index 0221acc6853..d346eca480a 100644
--- a/tests/functional/test_data_classes.py
+++ b/tests/functional/test_data_classes.py
@@ -1,7 +1,6 @@
 import base64
 import datetime
 import json
-import os
 from secrets import compare_digest
 from urllib.parse import quote_plus

@@ -58,12 +57,7 @@
     StreamViewType,
 )
 from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent
-
-
-def load_event(file_name: str) -> dict:
-    full_file_name = os.path.dirname(os.path.realpath(__file__)) + "/../events/" + file_name
-    with open(full_file_name) as fp:
-        return json.load(fp)
+from tests.functional.utils import load_event


 def test_dict_wrapper_equals():
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index ddf5ee226f5..ba6e82b72af 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -5,12 +5,14 @@
 import random
 import string
 from collections import namedtuple
+from typing import Iterable

 import pytest

 from aws_lambda_powertools import Logger, Tracer
 from aws_lambda_powertools.logging import correlation_paths
 from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
+from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter
 from aws_lambda_powertools.logging.logger import set_package_logger
 from aws_lambda_powertools.shared import constants

@@ -474,3 +476,89 @@ def handler(event, context):
     # THEN
     log = capture_logging_output(stdout)
     assert request_id == log["correlation_id"]
+
+
+def test_logger_append_remove_keys(stdout, service_name):
+    # GIVEN a Logger is initialized
+    logger = Logger(service=service_name, stream=stdout)
+    extra_keys = {"request_id": "id", "context": "value"}
+
+    # WHEN keys are updated
+    logger.append_keys(**extra_keys)
+    logger.info("message with new keys")
+
+    # AND removed
+    logger.remove_keys(extra_keys.keys())
+    logger.info("message after keys being removed")
+
+    # THEN additional keys should only be present in the first log statement
+    extra_keys_log, keys_removed_log = capture_multiple_logging_statements_output(stdout)
+
+    assert extra_keys.items() <= extra_keys_log.items()
+    assert (extra_keys.items() <= keys_removed_log.items()) is False
+
+
+def test_logger_custom_formatter(stdout, service_name, lambda_context):
+    class CustomFormatter(BasePowertoolsFormatter):
+        custom_format = {}
+
+        def append_keys(self, **additional_keys):
+            self.custom_format.update(additional_keys)
+
+        def remove_keys(self, keys: Iterable[str]):
+            for key in keys:
+                self.custom_format.pop(key, None)
+
+        def format(self, record: logging.LogRecord) -> str:  # noqa: A003
+            return json.dumps(
+                {
+                    "message": super().format(record),
+                    "timestamp": self.formatTime(record),
+                    "my_default_key": "test",
+                    **self.custom_format,
+                }
+            )
+
+    custom_formatter = CustomFormatter()
+
+    # GIVEN a Logger is initialized with a custom formatter
+    logger = Logger(service=service_name, stream=stdout, logger_formatter=custom_formatter)
+
+    # WHEN a lambda function is decorated with logger
+    @logger.inject_lambda_context
+    def handler(event, context):
+        logger.info("Hello")
+
+    handler({}, lambda_context)
+
+    lambda_context_keys = (
+        "function_name",
+        "function_memory_size",
+        "function_arn",
+        "function_request_id",
+    )
+
+    log = capture_logging_output(stdout)
+
+    # THEN custom key should always be present
+    # and lambda contextual info should also be in the logs
+    assert "my_default_key" in log
+    assert all(k in log for k in lambda_context_keys)
+
+
+def test_logger_custom_handler(lambda_context, service_name, tmp_path):
+    # GIVEN a Logger is initialized with a FileHandler
+    log_file = tmp_path / "log.json"
+    handler = logging.FileHandler(filename=log_file)
+    logger = Logger(service=service_name, logger_handler=handler)
+
+    # WHEN a log statement happens
+    @logger.inject_lambda_context
+    def handler(event, context):
+        logger.info("custom handler")
+
+    handler({}, lambda_context)
+
+    # THEN we should output to a file not stdout
+    log = log_file.read_text()
+    assert "custom handler" in log
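A short usage sketch combining the Logger options the new tests cover; the parameter names (`logger_handler`, `utc`) follow the tests, while the service name and file path are illustrative:

import logging
from aws_lambda_powertools import Logger

file_handler = logging.FileHandler("/tmp/app.json")
logger = Logger(
    service="orders",
    logger_handler=file_handler,  # write logs somewhere other than stdout
    utc=True,                     # format timestamps with time.gmtime
)
logger.append_keys(order_id="123")  # appears on every subsequent log line
logger.info("order accepted")
logger.remove_keys(["order_id"])    # dropped from later log lines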
diff --git a/tests/functional/test_aws_lambda_logging.py b/tests/functional/test_logger_powertools_formatter.py
similarity index 84%
rename from tests/functional/test_aws_lambda_logging.py
rename to tests/functional/test_logger_powertools_formatter.py
index b28a753fc98..4b92e6b47b9 100644
--- a/tests/functional/test_aws_lambda_logging.py
+++ b/tests/functional/test_logger_powertools_formatter.py
@@ -3,6 +3,7 @@
 import json
 import random
 import string
+import time

 import pytest

@@ -167,13 +168,13 @@
     log_dict: dict = json.loads(stdout.getvalue())

     # THEN the `location` and "timestamp" should match the formatting
-    assert log_dict["location"] == "[test_log_custom_formatting] test_aws_lambda_logging"
+    assert log_dict["location"] == "[test_log_custom_formatting] test_logger_powertools_formatter"
     assert log_dict["timestamp"] == "fake-datefmt"


 def test_log_dict_key_strip_nones(stdout, service_name):
     # GIVEN a logger configuration where we set `location` and `timestamp` to None
-    # Note: level, sampling_rate and service can not be suppressed
+    # Note: level and service cannot be suppressed
     logger = Logger(stream=stdout, level=None, location=None, timestamp=None, sampling_rate=None, service=None)

     # WHEN logging a message
@@ -181,8 +182,8 @@ def test_log_dict_key_strip_nones(stdout, service_name):

     log_dict: dict = json.loads(stdout.getvalue())

-    # THEN the keys should only include `level`, `message`, `service`, `sampling_rate`
-    assert sorted(log_dict.keys()) == ["level", "message", "sampling_rate", "service"]
+    # THEN the keys should only include `level`, `message`, `service`
+    assert sorted(log_dict.keys()) == ["level", "message", "service"]
     assert log_dict["service"] == "service_undefined"


@@ -241,3 +242,36 @@ def test_log_dict_xray_is_updated_when_tracing_id_changes(stdout, monkeypatch, s
     assert log_dict_2["xray_trace_id"] == trace_id_2

     monkeypatch.delenv(name="_X_AMZN_TRACE_ID")
+
+
+def test_log_custom_std_log_attribute(stdout, service_name):
+    # GIVEN a logger where we have a standard log attr process
+    # https://docs.python.org/3/library/logging.html#logrecord-attributes
+    logger = Logger(service=service_name, stream=stdout, process="%(process)d")
+
+    # WHEN logging a message
+    logger.info("foo")
+
+    log_dict: dict = json.loads(stdout.getvalue())
+
+    # THEN process key should be evaluated
+    assert "%" not in log_dict["process"]
+
+
+def test_log_in_utc(service_name):
+    # GIVEN a logger where UTC TZ has been set
+    logger = Logger(service=service_name, utc=True)
+
+    # THEN logging formatter time converter should use gmtime fn
+    assert logger._logger.handlers[0].formatter.converter == time.gmtime
+
+
+@pytest.mark.parametrize("message", ["hello", 1.10, {}, [], True, object()])
+def test_logging_various_primitives(stdout, service_name, message):
+    # GIVEN a logger with default settings
+    logger = Logger(service=service_name, stream=stdout)
+
+    # WHEN logging a message of multiple common types
+    # THEN it should raise no serialization/deserialization error
+    logger.info(message)
+    json.loads(stdout.getvalue())
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 3090a1228d2..ae160c65d87 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -15,6 +15,7 @@ def reset_metric_set():
     metrics = Metrics()
     metrics.clear_metrics()
+    metrics.clear_default_dimensions()
     metrics_global.is_cold_start = True  # ensure each test has cold start
     yield

@@ -84,7 +85,7 @@ def a_hundred_metrics() -> List[Dict[str, str]]:
 def serialize_metrics(
     metrics: List[Dict], dimensions: List[Dict], namespace: str, metadatas: List[Dict] = None
 ) -> Dict:
-    """ Helper function to build EMF object from a list of metrics, dimensions """
+    """Helper function to build EMF object from a list of metrics, dimensions"""
     my_metrics = MetricManager(namespace=namespace)
     for dimension in dimensions:
         my_metrics.add_dimension(**dimension)
@@ -101,7 +102,7 @@ def serialize_metrics(


 def serialize_single_metric(metric: Dict, dimension: Dict, namespace: str, metadata: Dict = None) -> Dict:
-    """ Helper function to build EMF object from a given metric, dimension and namespace """
+    """Helper function to build EMF object from a given metric, dimension and namespace"""
     my_metrics = MetricManager(namespace=namespace)
     my_metrics.add_metric(**metric)
     my_metrics.add_dimension(**dimension)
@@ -113,7 +114,7 @@ def serialize_single_metric(metric: Dict, dimension: Dict, namespace: str, metad
 def remove_timestamp(metrics: List):
-    """ Helper function to remove Timestamp key from EMF objects as they're built at serialization """
+    """Helper function to remove Timestamp key from EMF objects as they're built at serialization"""
     for metric in metrics:
         del metric["_aws"]["Timestamp"]

@@ -749,3 +750,75 @@ def test_metric_manage_metadata_set():
         assert metric.metadata_set == expected_dict
     except AttributeError:
         pytest.fail("AttributeError should not be raised")
+
+
+def test_log_persist_default_dimensions(capsys, metrics, dimensions, namespace):
+    # GIVEN Metrics is initialized and we persist a set of default dimensions
+    my_metrics = Metrics(namespace=namespace)
+    my_metrics.set_default_dimensions(environment="test", log_group="/lambda/test")
+
+    # WHEN we utilize log_metrics to serialize
+    # and flush metrics and clear all metrics and dimensions from memory
+    # at the end of a function execution
+    @my_metrics.log_metrics
+    def lambda_handler(evt, ctx):
+        for metric in metrics:
+            my_metrics.add_metric(**metric)
+
+    lambda_handler({}, {})
+    first_invocation = capture_metrics_output(capsys)
+
+    lambda_handler({}, {})
+    second_invocation = capture_metrics_output(capsys)
+
+    # THEN we should have default dimensions in both outputs
+    assert "environment" in first_invocation
+    assert "environment" in second_invocation
+
+
+def test_clear_default_dimensions(namespace):
+    # GIVEN Metrics is initialized and we persist a set of default dimensions
+    my_metrics = Metrics(namespace=namespace)
+    my_metrics.set_default_dimensions(environment="test", log_group="/lambda/test")
+
+    # WHEN they are removed via clear_default_dimensions method
+    my_metrics.clear_default_dimensions()
+
+    # THEN there should be no default dimensions
+    assert not my_metrics.default_dimensions
+
+
+def test_default_dimensions_across_instances(namespace):
+    # GIVEN Metrics is initialized and we persist a set of default dimensions
+    my_metrics = Metrics(namespace=namespace)
+    my_metrics.set_default_dimensions(environment="test", log_group="/lambda/test")
+
+    # WHEN a new Metrics instance is created
+    same_metrics = Metrics()
+
+    # THEN default dimensions should also be present
+    assert "environment" in same_metrics.default_dimensions
+
+
+def test_log_metrics_with_default_dimensions(capsys, metrics, dimensions, namespace):
+    # GIVEN Metrics is initialized
+    my_metrics = Metrics(namespace=namespace)
+    default_dimensions = {"environment": "test", "log_group": "/lambda/test"}
+
+    # WHEN we utilize log_metrics with default dimensions to serialize
+    # and flush metrics and clear all metrics and dimensions from memory
+    # at the end of a function execution
+    @my_metrics.log_metrics(default_dimensions=default_dimensions)
+    def lambda_handler(evt, ctx):
+        for metric in metrics:
+            my_metrics.add_metric(**metric)
+
+    lambda_handler({}, {})
+    first_invocation = capture_metrics_output(capsys)
+
+    lambda_handler({}, {})
+    second_invocation = capture_metrics_output(capsys)
+
+    # THEN we should have default dimensions in both outputs
+    assert "environment" in first_invocation
+    assert "environment" in second_invocation
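A usage sketch for the persisted default dimensions validated above; the namespace, dimension values, and metric name are illustrative:

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp")
metrics.set_default_dimensions(environment="test", log_group="/lambda/test")

@metrics.log_metrics
def lambda_handler(event, context):
    # default dimensions survive the flush that runs after each invocation
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)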
diff --git a/tests/functional/utils.py b/tests/functional/utils.py
new file mode 100644
index 00000000000..a58d27f3526
--- /dev/null
+++ b/tests/functional/utils.py
@@ -0,0 +1,8 @@
+import json
+from pathlib import Path
+from typing import Any
+
+
+def load_event(file_name: str) -> Any:
+    path = Path(str(Path(__file__).parent.parent) + "/events/" + file_name)
+    return json.loads(path.read_text())
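Usage sketch for the consolidated helper, matching how the refactored tests import it; file names resolve under tests/events:

from tests.functional.utils import load_event

event = load_event("apiGatewayProxyEvent.json")
assert event["path"] == "/my/path"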