diff --git a/.github/labeler.yml b/.github/labeler.yml index f1c2928a7c7..df3748b34dd 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,18 +1,23 @@ # Label general purpose utilities area/utilities: + - aws_lambda_powertools_python/utilities/* - aws_lambda_powertools_python/utilities/**/* - aws_lambda_powertools_python/middleware_factory/* + - aws_lambda_powertools_python/middleware_factory/**/* # Label core utilities area/logger: - aws_lambda_powertools_python/logging/* + - aws_lambda_powertools_python/logging/**/* area/tracer: - aws_lambda_powertools_python/tracing/* + - aws_lambda_powertools_python/tracing/**/* area/metrics: - aws_lambda_powertools_python/metrics/* + - aws_lambda_powertools_python/metrics/**/* documentation: - docs/* diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml deleted file mode 100644 index 5ec42cce815..00000000000 --- a/.github/workflows/labeler.yml +++ /dev/null @@ -1,11 +0,0 @@ -name: "PR Labeler" -on: -- pull_request_target - -jobs: - triage: - runs-on: ubuntu-latest - steps: - - uses: actions/labeler@main - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/CHANGELOG.md b/CHANGELOG.md index 75aa5a18c45..bf37163bc4a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,19 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo ## [Unreleased] +## [1.11.0] - 2021-03-05 +### Fixed + +* **Tracer**: Lazy loads X-Ray SDK to increase perf by 75% for those not instantiating Tracer +* **Metrics**: Optimize validation and serialization to increase perf by nearly 50% for large operations (<1ms) + +### Added + +* **Dataclass**: Add new Amazon Connect contact flow event +* **Idempotency**: New Idempotency utility +* **Docs**: Add example on how to integrate Batch utility with Sentry.io +* **Internal**: Added performance SLA tests for high level imports and Metrics validation/serialization + ## [1.10.5] - 2021-02-17 No changes. Bumped version to trigger new pipeline build for layer publishing. diff --git a/LICENSE b/LICENSE index 9e30e05ab6d..17c63bac7fb 100644 --- a/LICENSE +++ b/LICENSE @@ -12,3 +12,182 @@ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------- + +** Tensorflow - https://github.com/tensorflow/tensorflow/ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/Makefile b/Makefile
index 27a2896d812..d11ea72779f 100644
--- a/Makefile
+++ b/Makefile
@@ -17,10 +17,11 @@ lint: format
 	poetry run flake8 aws_lambda_powertools/* tests/*
 
 test:
-	poetry run pytest -vvv --cov=./ --cov-report=xml
+	poetry run pytest -m "not perf" --cov=./ --cov-report=xml
+	poetry run pytest --cache-clear tests/performance
 
 coverage-html:
-	poetry run pytest --cov-report html
+	poetry run pytest -m "not perf" --cov-report=html
 
 pr: lint test security-baseline complexity-baseline
diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py
index ecc44edf0fa..bfe88bf899b 100644
--- a/aws_lambda_powertools/metrics/base.py
+++ b/aws_lambda_powertools/metrics/base.py
@@ -3,24 +3,18 @@
 import logging
 import numbers
 import os
-import pathlib
 from collections import defaultdict
 from enum import Enum
 from typing import Any, Dict, List, Union
 
-import fastjsonschema
-
 from ..shared import constants
 from ..shared.functions import resolve_env_var_choice
 from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError
 
 logger = logging.getLogger(__name__)
 
-_schema_path = pathlib.Path(__file__).parent / "./schema.json"
-with _schema_path.open() as f:
-    CLOUDWATCH_EMF_SCHEMA = json.load(f)
-
 MAX_METRICS = 100
+MAX_DIMENSIONS = 9
 
 
 class MetricUnit(Enum):
@@ -184,6 +178,12 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me
         if self.service and not self.dimension_set.get("service"):
             self.dimension_set["service"] = self.service
 
+        if len(metrics) == 0:
+            raise SchemaValidationError("Must contain at least one metric.")
+
+        if self.namespace is None:
+            raise SchemaValidationError("Must contain a metric namespace.")
+
         logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions})
 
         metric_names_and_units: List[Dict[str, str]] = []  # [ { "Name": "metric_name", "Unit": "Count" } ]
@@ -213,12 +213,6 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me
             **metric_names_and_values,  # "single_metric": 1.0
         }
 
-        try:
-            logger.debug("Validating serialized metrics against CloudWatch EMF schema")
-            fastjsonschema.validate(definition=CLOUDWATCH_EMF_SCHEMA, data=embedded_metrics_object)
-        except fastjsonschema.JsonSchemaException as e:
-            message = f"Invalid format. Error: {e.message}, Invalid item: {e.name}"  # noqa: B306, E501
-            raise SchemaValidationError(message)
         return embedded_metrics_object
 
     def add_dimension(self, name: str, value: str):
@@ -238,7 +232,10 @@ def add_dimension(self, name: str, value: str):
             Dimension value
         """
         logger.debug(f"Adding dimension: {name}:{value}")
-
+        if len(self.dimension_set) == MAX_DIMENSIONS:
+            raise SchemaValidationError(
+                f"Maximum number of dimensions exceeded ({MAX_DIMENSIONS}): Unable to add dimension {name}."
+            )
         # Cast value to str according to EMF spec
         # Majority of values are expected to be string already, so
         # checking before casting improves performance in most cases
@@ -299,7 +296,7 @@ def __extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str:
         if unit in self._metric_unit_options:
             unit = MetricUnit[unit].value
 
-        if unit not in self._metric_units:  # str correta
+        if unit not in self._metric_units:
             raise MetricUnitError(
                 f"Invalid metric unit '{unit}', expected either option: {self._metric_unit_options}"
             )
diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py
index 4451eb2d1d0..8e7ace1e5bd 100644
--- a/aws_lambda_powertools/metrics/metric.py
+++ b/aws_lambda_powertools/metrics/metric.py
@@ -102,8 +102,12 @@ def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = No
 
     Raises
     ------
-    e
-        Propagate error received
+    MetricUnitError
+        When metric unit isn't supported by CloudWatch
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails EMF schema validation
     """
     metric_set = None
     try:
@@ -112,4 +116,4 @@ def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = No
         yield metric
         metric_set: Dict = metric.serialize_metric_set()
     finally:
-        print(json.dumps(metric_set))
+        print(json.dumps(metric_set, separators=(",", ":")))
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index 2ab6cb35b4a..4f53231b84f 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -57,13 +57,19 @@ def do_something():
 
     Parameters
     ----------
-    MetricManager : MetricManager
-        Inherits from `aws_lambda_powertools.metrics.base.MetricManager`
+    service : str, optional
+        service name to be used as metric dimension, by default "service_undefined"
+    namespace : str
+        Namespace for metrics
 
     Raises
     ------
-    e
-        Propagate error received
+    MetricUnitError
+        When metric unit isn't supported by CloudWatch
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails EMF schema validation
     """
 
     _metrics = {}
@@ -150,7 +156,7 @@ def decorate(event, context):
             else:
                 metrics = self.serialize_metric_set()
                 self.clear_metrics()
-                print(json.dumps(metrics))
+                print(json.dumps(metrics, separators=(",", ":")))
 
             return response
diff --git a/aws_lambda_powertools/metrics/schema.json b/aws_lambda_powertools/metrics/schema.json
deleted file mode 100644
index f948ed979fa..00000000000
--- a/aws_lambda_powertools/metrics/schema.json
+++ /dev/null
@@ -1,114 +0,0 @@
-{
-  "type": "object",
-  "title": "Root Node",
-  "required": [
-    "_aws"
-  ],
-  "properties": {
-    "_aws": {
-      "$id": "#/properties/_aws",
-      "type": "object",
-      "title": "Metadata",
-      "required": [
-        "Timestamp",
-        "CloudWatchMetrics"
-      ],
-      "properties": {
-        "Timestamp": {
-          "$id": "#/properties/_aws/properties/Timestamp",
-          "type": "integer",
-          "title": "The Timestamp Schema",
-          "examples": [
-            1565375354953
-          ]
-        },
-        "CloudWatchMetrics": {
-          "$id": "#/properties/_aws/properties/CloudWatchMetrics",
-          "type": "array",
-          "title": "MetricDirectives",
-          "items": {
-            "$id": "#/properties/_aws/properties/CloudWatchMetrics/items",
-            "type": "object",
-            "title": "MetricDirective",
-            "required": [
-              "Namespace",
-              "Dimensions",
-              "Metrics"
-            ],
-            "properties": {
-              "Namespace": {
-                "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Namespace",
-                "type": "string",
-                "title": "CloudWatch Metrics Namespace",
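With schema.json gone, the checks it used to encode live directly in `serialize_metric_set` and `add_dimension`. A minimal sketch of the new behaviour from user code; the namespace, service, and dimension names are made up for illustration:

```python
import json

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit, SchemaValidationError
from aws_lambda_powertools.metrics.base import MAX_DIMENSIONS

metrics = Metrics(namespace="ExampleApp", service="payment")  # illustrative names
metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)

# The dimension cap is now enforced eagerly in add_dimension(), instead of
# surfacing later as a JSON Schema violation at serialization time.
try:
    for i in range(MAX_DIMENSIONS + 1):
        metrics.add_dimension(name=f"dim_{i}", value=str(i))
except SchemaValidationError as exc:
    print(f"Rejected early: {exc}")

# Serialization performs the remaining checks inline (at least one metric,
# namespace present), and callers now print the EMF blob without whitespace.
print(json.dumps(metrics.serialize_metric_set(), separators=(",", ":")))
```

The compact `separators=(",", ":")` form matches what `single_metric` and the `log_metrics` decorator now emit.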
- "examples": [ - "MyApp" - ], - "pattern": "^(.*)$", - "minLength": 1 - }, - "Dimensions": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions", - "type": "array", - "title": "The Dimensions Schema", - "minItems": 1, - "items": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items", - "type": "array", - "title": "DimensionSet", - "minItems": 1, - "maxItems": 9, - "items": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items/items", - "type": "string", - "title": "DimensionReference", - "examples": [ - "Operation" - ], - "pattern": "^(.*)$", - "minItems": 1 - } - } - }, - "Metrics": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics", - "type": "array", - "title": "MetricDefinitions", - "minItems": 1, - "items": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items", - "type": "object", - "title": "MetricDefinition", - "required": [ - "Name" - ], - "minItems": 1, - "properties": { - "Name": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items/properties/Name", - "type": "string", - "title": "MetricName", - "examples": [ - "ProcessingLatency" - ], - "pattern": "^(.*)$", - "minLength": 1 - }, - "Unit": { - "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items/properties/Unit", - "type": "string", - "title": "MetricUnit", - "examples": [ - "Milliseconds" - ], - "pattern": "^(Seconds|Microseconds|Milliseconds|Bytes|Kilobytes|Megabytes|Gigabytes|Terabytes|Bits|Kilobits|Megabits|Gigabits|Terabits|Percent|Count|Bytes\\/Second|Kilobytes\\/Second|Megabytes\\/Second|Gigabytes\\/Second|Terabytes\\/Second|Bits\\/Second|Kilobits\\/Second|Megabits\\/Second|Gigabits\\/Second|Terabits\\/Second|Count\\/Second|None)$" - } - } - } - } - } - } - } - } - } - } -} diff --git a/aws_lambda_powertools/shared/cache_dict.py b/aws_lambda_powertools/shared/cache_dict.py new file mode 100644 index 00000000000..d7184cc1e2b --- /dev/null +++ b/aws_lambda_powertools/shared/cache_dict.py @@ -0,0 +1,31 @@ +from collections import OrderedDict + + +class LRUDict(OrderedDict): + """ + Cache implementation based on ordered dict with a maximum number of items. Last accessed item will be evicted + first. Currently used by idempotency utility. 
+ """ + + def __init__(self, max_items=1024, *args, **kwargs): + self.max_items = max_items + super().__init__(*args, **kwargs) + + def __getitem__(self, key): + value = super().__getitem__(key) + self.move_to_end(key) + return value + + def __setitem__(self, key, value): + if key in self: + self.move_to_end(key) + super().__setitem__(key, value) + if len(self) > self.max_items: + oldest = next(iter(self)) + del self[oldest] + + def get(self, key, *args, **kwargs): + item = super(LRUDict, self).get(key, *args, **kwargs) + if item: + self.move_to_end(key=key) + return item diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index c69d6b5ea49..eaad5640dfd 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -14,3 +14,7 @@ CHALICE_LOCAL_ENV: str = "AWS_CHALICE_CLI_MODE" SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME" XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID" + + +XRAY_SDK_MODULE = "aws_xray_sdk" +XRAY_SDK_CORE_MODULE = "aws_xray_sdk.core" diff --git a/aws_lambda_powertools/utilities/validation/jmespath_functions.py b/aws_lambda_powertools/shared/jmespath_functions.py similarity index 100% rename from aws_lambda_powertools/utilities/validation/jmespath_functions.py rename to aws_lambda_powertools/shared/jmespath_functions.py diff --git a/aws_lambda_powertools/shared/json_encoder.py b/aws_lambda_powertools/shared/json_encoder.py new file mode 100644 index 00000000000..32a094abd85 --- /dev/null +++ b/aws_lambda_powertools/shared/json_encoder.py @@ -0,0 +1,16 @@ +import decimal +import json +import math + + +class Encoder(json.JSONEncoder): + """ + Custom JSON encoder to allow for serialization of Decimals, similar to the serializer used by Lambda internally. + """ + + def default(self, obj): + if isinstance(obj, decimal.Decimal): + if obj.is_nan(): + return math.nan + return str(obj) + return super().default(obj) diff --git a/aws_lambda_powertools/shared/lazy_import.py b/aws_lambda_powertools/shared/lazy_import.py new file mode 100644 index 00000000000..e860a650f31 --- /dev/null +++ b/aws_lambda_powertools/shared/lazy_import.py @@ -0,0 +1,55 @@ +# Copyright 2015 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""A LazyLoader class.""" + +import importlib +import types + + +class LazyLoader(types.ModuleType): + """Lazily import a module, mainly to avoid pulling in large dependencies. + + `contrib`, and `ffmpeg` are examples of modules that are large and not always + needed, and this allows them to only be loaded when they are used. 
+
+    Note: Subclassing types.ModuleType allows us to integrate correctly with sys.modules and the import system
+    """
+
+    def __init__(self, local_name, parent_module_globals, name):  # pylint: disable=super-on-old-class
+        self._local_name = local_name
+        self._parent_module_globals = parent_module_globals
+
+        super(LazyLoader, self).__init__(name)
+
+    def _load(self):
+        # Import the target module and insert it into the parent's namespace
+        module = importlib.import_module(self.__name__)
+        self._parent_module_globals[self._local_name] = module
+
+        # Update this object's dict so that if someone keeps a reference to the
+        # LazyLoader, lookups are efficient (__getattr__ is only called on lookups
+        # that fail).
+        self.__dict__.update(module.__dict__)
+
+        return module
+
+    def __getattr__(self, item):
+        module = self._load()
+        return getattr(module, item)
+
+    def __dir__(self):
+        module = self._load()
+        return dir(module)
diff --git a/aws_lambda_powertools/tracing/base.py b/aws_lambda_powertools/tracing/base.py
new file mode 100644
index 00000000000..1857ed52a73
--- /dev/null
+++ b/aws_lambda_powertools/tracing/base.py
@@ -0,0 +1,145 @@
+import abc
+import numbers
+import traceback
+from contextlib import contextmanager
+from typing import Any, AsyncContextManager, ContextManager, List, NoReturn, Set, Union
+
+
+class BaseProvider(abc.ABC):
+    @abc.abstractmethod
+    @contextmanager
+    def in_subsegment(self, name=None, **kwargs) -> ContextManager:
+        """Return a subsegment context manager.
+
+        Parameters
+        ----------
+        name: str
+            Subsegment name
+        kwargs: Optional[dict]
+            Optional parameters to be propagated to segment
+        """
+
+    @abc.abstractmethod
+    @contextmanager
+    def in_subsegment_async(self, name=None, **kwargs) -> AsyncContextManager:
+        """Return a subsegment async context manager.
+
+        Parameters
+        ----------
+        name: str
+            Subsegment name
+        kwargs: Optional[dict]
+            Optional parameters to be propagated to segment
+        """
+
+    @abc.abstractmethod
+    def put_annotation(self, key: str, value: Union[str, numbers.Number, bool]) -> NoReturn:
+        """Annotate current active trace entity with a key-value pair.
+
+        Note: Annotations will be indexed for later search query.
+
+        Parameters
+        ----------
+        key: str
+            Metadata key
+        value: Union[str, numbers.Number, bool]
+            Annotation value
+        """
+
+    @abc.abstractmethod
+    def put_metadata(self, key: str, value: Any, namespace: str = "default") -> NoReturn:
+        """Add metadata to the current active trace entity.
+
+        Note: Metadata is not indexed but can be later retrieved by BatchGetTraces API.
+
+        Parameters
+        ----------
+        key: str
+            Metadata key
+        value: Any
+            Any object that can be serialized into a JSON string
+        namespace: Set[str]
+            Metadata namespace, by default 'default'
+        """
+
+    @abc.abstractmethod
+    def patch(self, modules: Set[str]) -> NoReturn:
+        """Instrument a set of supported libraries
+
+        Parameters
+        ----------
+        modules: Set[str]
+            Set of modules to be patched
+        """
+
+    @abc.abstractmethod
+    def patch_all(self) -> NoReturn:
+        """Instrument all supported libraries"""
+
+
+class BaseSegment(abc.ABC):
+    """Holds common properties and methods on segment and subsegment."""
+
+    @abc.abstractmethod
+    def close(self, end_time: int = None):
+        """Close the trace entity by setting `end_time`
+        and flip the in progress flag to False.
+
+        Parameters
+        ----------
+        end_time: int
+            Time in epoch seconds, by default current time will be used.
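The `LazyLoader` shim above only pays the import cost on first attribute access. A minimal sketch of the pattern, using the standard-library `json` module as a stand-in for a heavy dependency:

```python
from aws_lambda_powertools.shared.lazy_import import LazyLoader

# No real import happens here; the name is just registered in this namespace.
lazy_json = LazyLoader("lazy_json", globals(), "json")

# First attribute access calls importlib.import_module("json"), swaps the
# real module into globals(), and forwards the lookup.
print(lazy_json.dumps({"lazy": True}))
```

This is the same mechanism the tracer uses below to defer importing the X-Ray SDK.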
+ """ + + @abc.abstractmethod + def add_subsegment(self, subsegment: Any): + """Add input subsegment as a child subsegment.""" + + @abc.abstractmethod + def remove_subsegment(self, subsegment: Any): + """Remove input subsegment from child subsegments.""" + + @abc.abstractmethod + def put_annotation(self, key: str, value: Union[str, numbers.Number, bool]) -> NoReturn: + """Annotate segment or subsegment with a key-value pair. + + Note: Annotations will be indexed for later search query. + + Parameters + ---------- + key: str + Metadata key + value: Union[str, numbers.Number, bool] + Annotation value + """ + + @abc.abstractmethod + def put_metadata(self, key: str, value: Any, namespace: str = "default") -> NoReturn: + """Add metadata to segment or subsegment. Metadata is not indexed + but can be later retrieved by BatchGetTraces API. + + Parameters + ---------- + key: str + Metadata key + value: Any + Any object that can be serialized into a JSON string + namespace: Set[str] + Metadata namespace, by default 'default' + """ + + @abc.abstractmethod + def add_exception(self, exception: BaseException, stack: List[traceback.StackSummary], remote: bool = False): + """Add an exception to trace entities. + + Parameters + ---------- + exception: Exception + Caught exception + stack: List[traceback.StackSummary] + List of traceback summaries + + Output from `traceback.extract_stack()`. + remote: bool + Whether it's a client error (False) or downstream service error (True), by default False + """ diff --git a/aws_lambda_powertools/tracing/extensions.py b/aws_lambda_powertools/tracing/extensions.py index 2bb0125e841..6c641238c98 100644 --- a/aws_lambda_powertools/tracing/extensions.py +++ b/aws_lambda_powertools/tracing/extensions.py @@ -8,8 +8,8 @@ def aiohttp_trace_config(): TraceConfig aiohttp trace config """ - from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config + from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config # pragma: no cover - aws_xray_trace_config.__doc__ = "aiohttp extension for X-Ray (aws_xray_trace_config)" + aws_xray_trace_config.__doc__ = "aiohttp extension for X-Ray (aws_xray_trace_config)" # pragma: no cover - return aws_xray_trace_config() + return aws_xray_trace_config() # pragma: no cover diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index bfd18be245a..f5b9ac92728 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -3,21 +3,20 @@ import functools import inspect import logging +import numbers import os -from typing import Any, Callable, Dict, List, Optional, Tuple - -import aws_xray_sdk -import aws_xray_sdk.core +from typing import Any, Callable, Dict, List, Optional, Tuple, Union from ..shared import constants from ..shared.functions import resolve_truthy_env_var_choice +from ..shared.lazy_import import LazyLoader +from .base import BaseProvider, BaseSegment is_cold_start = True logger = logging.getLogger(__name__) -# Set the streaming threshold to 0 on the default recorder to force sending -# subsegments individually, rather than batching them. 
-# See https://github.com/awslabs/aws-lambda-powertools-python/issues/283 -aws_xray_sdk.core.xray_recorder.configure(streaming_threshold=0) + +aws_xray_sdk = LazyLoader(constants.XRAY_SDK_MODULE, globals(), constants.XRAY_SDK_MODULE) +aws_xray_sdk.core = LazyLoader(constants.XRAY_SDK_CORE_MODULE, globals(), constants.XRAY_SDK_CORE_MODULE) class Tracer: @@ -139,7 +138,7 @@ def handler(event: dict, context: Any) -> Dict: "disabled": False, "auto_patch": True, "patch_modules": None, - "provider": aws_xray_sdk.core.xray_recorder, + "provider": None, } _config = copy.copy(_default_config) @@ -148,8 +147,8 @@ def __init__( service: str = None, disabled: bool = None, auto_patch: bool = None, - patch_modules: List = None, - provider: aws_xray_sdk.core.xray_recorder = None, + patch_modules: Optional[Tuple[str]] = None, + provider: BaseProvider = None, ): self.__build_config( service=service, disabled=disabled, auto_patch=auto_patch, patch_modules=patch_modules, provider=provider @@ -165,14 +164,19 @@ def __init__( if self.auto_patch: self.patch(modules=patch_modules) - def put_annotation(self, key: str, value: Any): + # Set the streaming threshold to 0 on the default recorder to force sending + # subsegments individually, rather than batching them. + # See https://github.com/awslabs/aws-lambda-powertools-python/issues/283 + aws_xray_sdk.core.xray_recorder.configure(streaming_threshold=0) # noqa: E800 + + def put_annotation(self, key: str, value: Union[str, numbers.Number, bool]): """Adds annotation to existing segment or subsegment Parameters ---------- key : str Annotation key - value : any + value : Union[str, numbers.Number, bool] Value for annotation Example @@ -238,7 +242,7 @@ def patch(self, modules: Tuple[str] = None): def capture_lambda_handler( self, - lambda_handler: Callable[[Dict, Any], Any] = None, + lambda_handler: Callable[[Dict, Any, Optional[Dict]], Any] = None, capture_response: Optional[bool] = None, capture_error: Optional[bool] = None, ): @@ -512,8 +516,8 @@ async def async_tasks(): def _decorate_async_function( self, method: Callable = None, - capture_response: Optional[bool] = None, - capture_error: Optional[bool] = None, + capture_response: Optional[Union[bool, str]] = None, + capture_error: Optional[Union[bool, str]] = None, method_name: str = None, ): @functools.wraps(method) @@ -539,8 +543,8 @@ async def decorate(*args, **kwargs): def _decorate_generator_function( self, method: Callable = None, - capture_response: Optional[bool] = None, - capture_error: Optional[bool] = None, + capture_response: Optional[Union[bool, str]] = None, + capture_error: Optional[Union[bool, str]] = None, method_name: str = None, ): @functools.wraps(method) @@ -566,8 +570,8 @@ def decorate(*args, **kwargs): def _decorate_generator_function_with_context_manager( self, method: Callable = None, - capture_response: Optional[bool] = None, - capture_error: Optional[bool] = None, + capture_response: Optional[Union[bool, str]] = None, + capture_error: Optional[Union[bool, str]] = None, method_name: str = None, ): @functools.wraps(method) @@ -594,8 +598,8 @@ def decorate(*args, **kwargs): def _decorate_sync_function( self, method: Callable = None, - capture_response: Optional[bool] = None, - capture_error: Optional[bool] = None, + capture_response: Optional[Union[bool, str]] = None, + capture_error: Optional[Union[bool, str]] = None, method_name: str = None, ): @functools.wraps(method) @@ -625,8 +629,8 @@ def _add_response_as_metadata( self, method_name: str = None, data: Any = None, - subsegment: 
aws_xray_sdk.core.models.subsegment = None, - capture_response: Optional[bool] = None, + subsegment: BaseSegment = None, + capture_response: Optional[Union[bool, str]] = None, ): """Add response as metadata for given subsegment @@ -636,7 +640,7 @@ def _add_response_as_metadata( method name to add as metadata key, by default None data : Any, optional data to add as subsegment metadata, by default None - subsegment : aws_xray_sdk.core.models.subsegment, optional + subsegment : BaseSegment, optional existing subsegment to add metadata on, by default None capture_response : bool, optional Do not include response as metadata @@ -650,7 +654,7 @@ def _add_full_exception_as_metadata( self, method_name: str = None, error: Exception = None, - subsegment: aws_xray_sdk.core.models.subsegment = None, + subsegment: BaseSegment = None, capture_error: Optional[bool] = None, ): """Add full exception object as metadata for given subsegment @@ -661,7 +665,7 @@ def _add_full_exception_as_metadata( method name to add as metadata key, by default None error : Exception, optional error to add as subsegment metadata, by default None - subsegment : aws_xray_sdk.core.models.subsegment, optional + subsegment : BaseSegment, optional existing subsegment to add metadata on, by default None capture_error : bool, optional Do not include error as metadata, by default True @@ -678,7 +682,7 @@ def _disable_tracer_provider(): aws_xray_sdk.global_sdk_config.set_sdk_enabled(False) @staticmethod - def _is_tracer_disabled() -> bool: + def _is_tracer_disabled() -> Union[bool, str]: """Detects whether trace has been disabled Tracing is automatically disabled in the following conditions: @@ -689,7 +693,7 @@ def _is_tracer_disabled() -> bool: Returns ------- - bool + Union[bool, str] """ logger.debug("Verifying whether Tracing has been disabled") is_lambda_sam_cli = os.getenv(constants.SAM_LOCAL_ENV) @@ -712,13 +716,13 @@ def __build_config( disabled: bool = None, auto_patch: bool = None, patch_modules: List = None, - provider: aws_xray_sdk.core.xray_recorder = None, + provider: BaseProvider = None, ): """ Populates Tracer config for new and existing initializations """ is_disabled = disabled if disabled is not None else self._is_tracer_disabled() is_service = service if service is not None else os.getenv(constants.SERVICE_NAME_ENV) - self._config["provider"] = provider if provider is not None else self._config["provider"] + self._config["provider"] = provider or self._config["provider"] or aws_xray_sdk.core.xray_recorder self._config["auto_patch"] = auto_patch if auto_patch is not None else self._config["auto_patch"] self._config["service"] = is_service or self._config["service"] self._config["disabled"] = is_disabled or self._config["disabled"] diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index 47ca29c2148..9c74983f3a9 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -1,6 +1,7 @@ from .alb_event import ALBEvent from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2 from .cloud_watch_logs_event import CloudWatchLogsEvent +from .connect_contact_flow_event import ConnectContactFlowEvent from .dynamo_db_stream_event import DynamoDBStreamEvent from .event_bridge_event import EventBridgeEvent from .kinesis_stream_event import KinesisStreamEvent @@ -14,6 +15,7 @@ "APIGatewayProxyEventV2", "ALBEvent", "CloudWatchLogsEvent", + 
"ConnectContactFlowEvent", "DynamoDBStreamEvent", "EventBridgeEvent", "KinesisStreamEvent", diff --git a/aws_lambda_powertools/utilities/data_classes/connect_contact_flow_event.py b/aws_lambda_powertools/utilities/data_classes/connect_contact_flow_event.py new file mode 100644 index 00000000000..79f086ac1e2 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/connect_contact_flow_event.py @@ -0,0 +1,166 @@ +from enum import Enum, auto +from typing import Dict, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class ConnectContactFlowChannel(Enum): + VOICE = auto() + CHAT = auto() + + +class ConnectContactFlowEndpointType(Enum): + TELEPHONE_NUMBER = auto() + + +class ConnectContactFlowInitiationMethod(Enum): + INBOUND = auto() + OUTBOUND = auto() + TRANSFER = auto() + CALLBACK = auto() + API = auto() + + +class ConnectContactFlowEndpoint(DictWrapper): + @property + def address(self) -> str: + """The phone number.""" + return self["Address"] + + @property + def endpoint_type(self) -> ConnectContactFlowEndpointType: + """The enpoint type.""" + return ConnectContactFlowEndpointType[self["Type"]] + + +class ConnectContactFlowQueue(DictWrapper): + @property + def arn(self) -> str: + """The unique queue ARN.""" + return self["ARN"] + + @property + def name(self) -> str: + """The queue name.""" + return self["Name"] + + +class ConnectContactFlowMediaStreamAudio(DictWrapper): + @property + def start_fragment_number(self) -> Optional[str]: + """The number that identifies the Kinesis Video Streams fragment, in the stream used for Live media streaming, + in which the customer audio stream started. + """ + return self["StartFragmentNumber"] + + @property + def start_timestamp(self) -> Optional[str]: + """When the customer audio stream started.""" + return self["StartTimestamp"] + + @property + def stream_arn(self) -> Optional[str]: + """The ARN of the Kinesis Video stream used for Live media streaming that includes the customer data to + reference. + """ + return self["StreamARN"] + + +class ConnectContactFlowMediaStreamCustomer(DictWrapper): + @property + def audio(self) -> ConnectContactFlowMediaStreamAudio: + return ConnectContactFlowMediaStreamAudio(self["Audio"]) + + +class ConnectContactFlowMediaStreams(DictWrapper): + @property + def customer(self) -> ConnectContactFlowMediaStreamCustomer: + return ConnectContactFlowMediaStreamCustomer(self["Customer"]) + + +class ConnectContactFlowData(DictWrapper): + @property + def attributes(self) -> Dict[str, str]: + """These are attributes that have been previously associated with a contact, + such as when using a Set contact attributes block in a contact flow. + This map may be empty if there aren't any saved attributes. + """ + return self["Attributes"] + + @property + def channel(self) -> ConnectContactFlowChannel: + """The method used to contact your contact center.""" + return ConnectContactFlowChannel[self["Channel"]] + + @property + def contact_id(self) -> str: + """The unique identifier of the contact.""" + return self["ContactId"] + + @property + def customer_endpoint(self) -> Optional[ConnectContactFlowEndpoint]: + """Contains the customer’s address (number) and type of address.""" + if self["CustomerEndpoint"] is not None: + return ConnectContactFlowEndpoint(self["CustomerEndpoint"]) + return None + + @property + def initial_contact_id(self) -> str: + """The unique identifier for the contact associated with the first interaction between the customer and your + contact center. 
Use the initial contact ID to track contacts between contact flows. + """ + return self["InitialContactId"] + + @property + def initiation_method(self) -> ConnectContactFlowInitiationMethod: + """How the contact was initiated.""" + return ConnectContactFlowInitiationMethod[self["InitiationMethod"]] + + @property + def instance_arn(self) -> str: + """The ARN for your Amazon Connect instance.""" + return self["InstanceARN"] + + @property + def previous_contact_id(self) -> str: + """The unique identifier for the contact before it was transferred. + Use the previous contact ID to trace contacts between contact flows. + """ + return self["PreviousContactId"] + + @property + def queue(self) -> Optional[ConnectContactFlowQueue]: + """The current queue.""" + if self["Queue"] is not None: + return ConnectContactFlowQueue(self["Queue"]) + return None + + @property + def system_endpoint(self) -> Optional[ConnectContactFlowEndpoint]: + """Contains the address (number) the customer dialed to call your contact center and type of address.""" + if self["SystemEndpoint"] is not None: + return ConnectContactFlowEndpoint(self["SystemEndpoint"]) + return None + + @property + def media_streams(self) -> ConnectContactFlowMediaStreams: + return ConnectContactFlowMediaStreams(self["MediaStreams"]) + + +class ConnectContactFlowEvent(DictWrapper): + """Amazon Connect contact flow event + + Documentation: + ------------- + - https://docs.aws.amazon.com/connect/latest/adminguide/connect-lambda-functions.html + """ + + @property + def contact_data(self) -> ConnectContactFlowData: + """This is always passed by Amazon Connect for every contact. Some parameters are optional.""" + return ConnectContactFlowData(self["Details"]["ContactData"]) + + @property + def parameters(self) -> Dict[str, str]: + """These are parameters specific to this call that were defined when you created the Lambda function.""" + return self["Details"]["Parameters"] diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py new file mode 100644 index 00000000000..b46d0855a93 --- /dev/null +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -0,0 +1,10 @@ +""" +Utility for adding idempotency to lambda functions +""" + +from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.persistence.dynamodb import DynamoDBPersistenceLayer + +from .idempotency import IdempotencyConfig, idempotent + +__all__ = ("DynamoDBPersistenceLayer", "BasePersistenceLayer", "idempotent", "IdempotencyConfig") diff --git a/aws_lambda_powertools/utilities/idempotency/config.py b/aws_lambda_powertools/utilities/idempotency/config.py new file mode 100644 index 00000000000..52afb3bad8c --- /dev/null +++ b/aws_lambda_powertools/utilities/idempotency/config.py @@ -0,0 +1,43 @@ +from typing import Dict + + +class IdempotencyConfig: + def __init__( + self, + event_key_jmespath: str = "", + payload_validation_jmespath: str = "", + jmespath_options: Dict = None, + raise_on_no_idempotency_key: bool = False, + expires_after_seconds: int = 60 * 60, # 1 hour default + use_local_cache: bool = False, + local_cache_max_items: int = 256, + hash_function: str = "md5", + ): + """ + Initialize the base persistence layer + + Parameters + ---------- + event_key_jmespath: str + A jmespath expression to extract the idempotency key from the event record + payload_validation_jmespath: str + A jmespath expression to extract the payload to 
be validated from the event record
+        jmespath_options: Dict, optional
+            Alternative JMESPath options, e.g. custom JMESPath functions
+        raise_on_no_idempotency_key: bool, optional
+            Raise exception if no idempotency key was found in the request, by default False
+        expires_after_seconds: int
+            The number of seconds to wait before a record is expired
+        use_local_cache: bool, optional
+            Whether to locally cache idempotency results, by default False
+        local_cache_max_items: int, optional
+            Max number of items to store in local cache, by default 256
+        hash_function: str, optional
+            Function to use for calculating hashes, by default md5.
+        """
+        self.event_key_jmespath = event_key_jmespath
+        self.payload_validation_jmespath = payload_validation_jmespath
+        self.jmespath_options = jmespath_options
+        self.raise_on_no_idempotency_key = raise_on_no_idempotency_key
+        self.expires_after_seconds = expires_after_seconds
+        self.use_local_cache = use_local_cache
+        self.local_cache_max_items = local_cache_max_items
+        self.hash_function = hash_function
diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py
new file mode 100644
index 00000000000..6c7318ebca0
--- /dev/null
+++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py
@@ -0,0 +1,51 @@
+"""
+Idempotency errors
+"""
+
+
+class IdempotencyItemAlreadyExistsError(Exception):
+    """
+    Item attempting to be inserted into persistence store already exists and is not expired
+    """
+
+
+class IdempotencyItemNotFoundError(Exception):
+    """
+    Item does not exist in persistence store
+    """
+
+
+class IdempotencyAlreadyInProgressError(Exception):
+    """
+    Execution with idempotency key is already in progress
+    """
+
+
+class IdempotencyInvalidStatusError(Exception):
+    """
+    An invalid status was provided
+    """
+
+
+class IdempotencyValidationError(Exception):
+    """
+    Payload does not match stored idempotency record
+    """
+
+
+class IdempotencyInconsistentStateError(Exception):
+    """
+    State is inconsistent across multiple requests to persistence store
+    """
+
+
+class IdempotencyPersistenceLayerError(Exception):
+    """
+    Unrecoverable error from the data store
+    """
+
+
+class IdempotencyKeyError(Exception):
+    """
+    Payload does not contain an idempotency key
+    """
diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py
new file mode 100644
index 00000000000..06d77a9fd72
--- /dev/null
+++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py
@@ -0,0 +1,234 @@
+"""
+Primary interface for idempotent Lambda functions utility
+"""
+import logging
+from typing import Any, Callable, Dict, Optional
+
+from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
+from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig
+from aws_lambda_powertools.utilities.idempotency.exceptions import (
+    IdempotencyAlreadyInProgressError,
+    IdempotencyInconsistentStateError,
+    IdempotencyItemAlreadyExistsError,
+    IdempotencyItemNotFoundError,
+    IdempotencyPersistenceLayerError,
+    IdempotencyValidationError,
+)
+from aws_lambda_powertools.utilities.idempotency.persistence.base import (
+    STATUS_CONSTANTS,
+    BasePersistenceLayer,
+    DataRecord,
+)
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+logger = logging.getLogger(__name__)
+
+
+@lambda_handler_decorator
+def idempotent(
+    handler: Callable[[Any, LambdaContext], Any],
+    event: Dict[str, Any],
+    context: LambdaContext,
+    persistence_store: BasePersistenceLayer,
+    config: IdempotencyConfig = None,
+) -> Any:
""" + Middleware to handle idempotency + + Parameters + ---------- + handler: Callable + Lambda's handler + event: Dict + Lambda's Event + context: Dict + Lambda's Context + persistence_store: BasePersistenceLayer + Instance of BasePersistenceLayer to store data + config: IdempotencyConfig + Configuration + + Examples + -------- + **Processes Lambda's event in an idempotent manner** + >>> from aws_lambda_powertools.utilities.idempotency import ( + >>> idempotent, DynamoDBPersistenceLayer, IdempotencyConfig + >>> ) + >>> + >>> idem_config=IdempotencyConfig(event_key_jmespath="body") + >>> persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency_store") + >>> + >>> @idempotent(config=idem_config, persistence_store=persistence_layer) + >>> def handler(event, context): + >>> return {"StatusCode": 200} + """ + + config = config or IdempotencyConfig() + idempotency_handler = IdempotencyHandler( + lambda_handler=handler, event=event, context=context, persistence_store=persistence_store, config=config + ) + + # IdempotencyInconsistentStateError can happen under rare but expected cases when persistent state changes in the + # small time between put & get requests. In most cases we can retry successfully on this exception. + # Maintenance: Allow customers to specify number of retries + max_handler_retries = 2 + for i in range(max_handler_retries + 1): + try: + return idempotency_handler.handle() + except IdempotencyInconsistentStateError: + if i < max_handler_retries: + continue + else: + # Allow the exception to bubble up after max retries exceeded + raise + + +class IdempotencyHandler: + """ + Class to orchestrate calls to persistence layer. + """ + + def __init__( + self, + lambda_handler: Callable[[Any, LambdaContext], Any], + event: Dict[str, Any], + context: LambdaContext, + config: IdempotencyConfig, + persistence_store: BasePersistenceLayer, + ): + """ + Initialize the IdempotencyHandler + + Parameters + ---------- + lambda_handler : Callable[[Any, LambdaContext], Any] + Lambda function handler + event : Dict[str, Any] + Event payload lambda handler will be called with + context : LambdaContext + Context object which will be passed to lambda handler + persistence_store : BasePersistenceLayer + Instance of persistence layer to store idempotency records + """ + persistence_store.configure(config) + self.persistence_store = persistence_store + self.context = context + self.event = event + self.lambda_handler = lambda_handler + self.max_handler_retries = 2 + + def handle(self) -> Any: + """ + Main entry point for handling idempotent execution of lambda handler. + + Returns + ------- + Any + lambda handler response + + """ + try: + # We call save_inprogress first as an optimization for the most common case where no idempotent record + # already exists. If it succeeds, there's no need to call get_record. + self.persistence_store.save_inprogress(event=self.event) + except IdempotencyItemAlreadyExistsError: + # Now we know the item already exists, we can retrieve it + record = self._get_idempotency_record() + return self._handle_for_status(record) + + return self._call_lambda_handler() + + def _get_idempotency_record(self) -> DataRecord: + """ + Retrieve the idempotency record from the persistence layer. + + Raises + ---------- + IdempotencyInconsistentStateError + + """ + try: + event_record = self.persistence_store.get_record(self.event) + except IdempotencyItemNotFoundError: + # This code path will only be triggered if the record is removed between save_inprogress and get_record. 
+            logger.debug(
+                "An existing idempotency record was deleted before we could retrieve it. Proceeding with lambda "
+                "handler"
+            )
+            raise IdempotencyInconsistentStateError("save_inprogress and get_record return inconsistent results.")
+
+        # Allow this exception to bubble up
+        except IdempotencyValidationError:
+            raise
+
+        # Wrap remaining unhandled exceptions with IdempotencyPersistenceLayerError to ease exception handling for
+        # clients
+        except Exception as exc:
+            raise IdempotencyPersistenceLayerError("Failed to get record from idempotency store") from exc
+
+        return event_record
+
+    def _handle_for_status(self, event_record: DataRecord) -> Optional[Dict[Any, Any]]:
+        """
+        Take appropriate action based on event_record's status
+
+        Parameters
+        ----------
+        event_record: DataRecord
+
+        Returns
+        -------
+        Optional[Dict[Any, Any]]
+            Lambda response previously used for this idempotency key, if it has successfully executed already.
+
+        Raises
+        ------
+        IdempotencyAlreadyInProgressError
+            A lambda execution is already in progress
+        IdempotencyInconsistentStateError
+            The persistence store reports inconsistent states across different requests. Retryable.
+        """
+        # This code path will only be triggered if the record becomes expired between the save_inprogress call and here
+        if event_record.status == STATUS_CONSTANTS["EXPIRED"]:
+            raise IdempotencyInconsistentStateError("save_inprogress and get_record return inconsistent results.")
+
+        if event_record.status == STATUS_CONSTANTS["INPROGRESS"]:
+            raise IdempotencyAlreadyInProgressError(
+                f"Execution already in progress with idempotency key: "
+                f"{self.persistence_store.event_key_jmespath}={event_record.idempotency_key}"
+            )
+
+        return event_record.response_json_as_dict()
+
+    def _call_lambda_handler(self) -> Any:
+        """
+        Call the lambda handler function and update the persistence store appropriately, depending on the outcome
+
+        Returns
+        -------
+        Any
+            lambda handler response
+
+        """
+        try:
+            handler_response = self.lambda_handler(self.event, self.context)
+        except Exception as handler_exception:
+            # We need these nested blocks to preserve lambda handler exception in case the persistence store operation
+            # also raises an exception
+            try:
+                self.persistence_store.delete_record(event=self.event, exception=handler_exception)
+            except Exception as delete_exception:
+                raise IdempotencyPersistenceLayerError(
+                    "Failed to delete record from idempotency store"
+                ) from delete_exception
+            raise
+
+        else:
+            try:
+                self.persistence_store.save_success(event=self.event, result=handler_response)
+            except Exception as save_exception:
+                raise IdempotencyPersistenceLayerError(
+                    "Failed to update record state to success in idempotency store"
+                ) from save_exception
+
+        return handler_response
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/__init__.py b/aws_lambda_powertools/utilities/idempotency/persistence/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py
new file mode 100644
index 00000000000..58f67a292e7
--- /dev/null
+++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py
@@ -0,0 +1,466 @@
+"""
+Persistence layers supporting idempotency
+"""
+
+import datetime
+import hashlib
+import json
+import logging
+import warnings
+from abc import ABC, abstractmethod
+from types import MappingProxyType
+from typing import Any, Dict, Optional
+
+import jmespath
+
+from
aws_lambda_powertools.shared.cache_dict import LRUDict +from aws_lambda_powertools.shared.jmespath_functions import PowertoolsFunctions +from aws_lambda_powertools.shared.json_encoder import Encoder +from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyInvalidStatusError, + IdempotencyItemAlreadyExistsError, + IdempotencyKeyError, + IdempotencyValidationError, +) + +logger = logging.getLogger(__name__) + +STATUS_CONSTANTS = MappingProxyType({"INPROGRESS": "INPROGRESS", "COMPLETED": "COMPLETED", "EXPIRED": "EXPIRED"}) + + +class DataRecord: + """ + Data Class for idempotency records. + """ + + def __init__( + self, + idempotency_key, + status: str = "", + expiry_timestamp: int = None, + response_data: str = "", + payload_hash: str = None, + ) -> None: + """ + + Parameters + ---------- + idempotency_key: str + hashed representation of the idempotent data + status: str, optional + status of the idempotent record + expiry_timestamp: int, optional + time before the record should expire, in seconds + payload_hash: str, optional + hashed representation of payload + response_data: str, optional + response data from previous executions using the record + """ + self.idempotency_key = idempotency_key + self.payload_hash = payload_hash + self.expiry_timestamp = expiry_timestamp + self._status = status + self.response_data = response_data + + @property + def is_expired(self) -> bool: + """ + Check if data record is expired + + Returns + ------- + bool + Whether the record is currently expired or not + """ + return bool(self.expiry_timestamp and int(datetime.datetime.now().timestamp()) > self.expiry_timestamp) + + @property + def status(self) -> str: + """ + Get status of data record + + Returns + ------- + str + """ + if self.is_expired: + return STATUS_CONSTANTS["EXPIRED"] + elif self._status in STATUS_CONSTANTS.values(): + return self._status + else: + raise IdempotencyInvalidStatusError(self._status) + + def response_json_as_dict(self) -> dict: + """ + Get response data deserialized to python dict + + Returns + ------- + dict + previous response data deserialized + """ + return json.loads(self.response_data) + + +class BasePersistenceLayer(ABC): + """ + Abstract Base Class for Idempotency persistence layer. 
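`DataRecord.status` folds expiry into the state machine, so callers never have to compare timestamps themselves. A small sketch with invented values:

```python
import datetime

from aws_lambda_powertools.utilities.idempotency.persistence.base import (
    STATUS_CONSTANTS,
    DataRecord,
)

ten_seconds_ago = int(datetime.datetime.now().timestamp()) - 10

record = DataRecord(
    idempotency_key="hashed-event-key",            # illustrative values
    status=STATUS_CONSTANTS["COMPLETED"],
    expiry_timestamp=ten_seconds_ago,
    response_data='{"statusCode": 200}',
)

# Expiry takes precedence over the stored status, so a stale COMPLETED
# record reports EXPIRED and will not be reused.
assert record.is_expired
assert record.status == STATUS_CONSTANTS["EXPIRED"]
print(record.response_json_as_dict())  # {'statusCode': 200}
```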
+ """ + + def __init__(self): + """Initialize the defaults """ + self.configured = False + self.event_key_jmespath: Optional[str] = None + self.event_key_compiled_jmespath = None + self.jmespath_options: Optional[dict] = None + self.payload_validation_enabled = False + self.validation_key_jmespath = None + self.raise_on_no_idempotency_key = False + self.expires_after_seconds: int = 60 * 60 # 1 hour default + self.use_local_cache = False + self._cache: Optional[LRUDict] = None + self.hash_function = None + + def configure(self, config: IdempotencyConfig) -> None: + """ + Initialize the base persistence layer from the configuration settings + + Parameters + ---------- + config: IdempotencyConfig + Idempotency configuration settings + """ + if self.configured: + # Prevent being reconfigured multiple times + return + self.configured = True + + self.event_key_jmespath = config.event_key_jmespath + if config.event_key_jmespath: + self.event_key_compiled_jmespath = jmespath.compile(config.event_key_jmespath) + self.jmespath_options = config.jmespath_options + if not self.jmespath_options: + self.jmespath_options = {"custom_functions": PowertoolsFunctions()} + if config.payload_validation_jmespath: + self.validation_key_jmespath = jmespath.compile(config.payload_validation_jmespath) + self.payload_validation_enabled = True + self.raise_on_no_idempotency_key = config.raise_on_no_idempotency_key + self.expires_after_seconds = config.expires_after_seconds + self.use_local_cache = config.use_local_cache + if self.use_local_cache: + self._cache = LRUDict(max_items=config.local_cache_max_items) + self.hash_function = getattr(hashlib, config.hash_function) + + def _get_hashed_idempotency_key(self, lambda_event: Dict[str, Any]) -> str: + """ + Extract data from lambda event using event key jmespath, and return a hashed representation + + Parameters + ---------- + lambda_event: Dict[str, Any] + Lambda event + + Returns + ------- + str + Hashed representation of the data extracted by the jmespath expression + + """ + data = lambda_event + + if self.event_key_jmespath: + data = self.event_key_compiled_jmespath.search( + lambda_event, options=jmespath.Options(**self.jmespath_options) + ) + + if self.is_missing_idempotency_key(data): + if self.raise_on_no_idempotency_key: + raise IdempotencyKeyError("No data found to create a hashed idempotency_key") + warnings.warn(f"No value found for idempotency_key. 
jmespath: {self.event_key_jmespath}") + + return self._generate_hash(data) + + @staticmethod + def is_missing_idempotency_key(data) -> bool: + return data is None or not data or all(x is None for x in data) + + def _get_hashed_payload(self, lambda_event: Dict[str, Any]) -> str: + """ + Extract data from lambda event using validation key jmespath, and return a hashed representation + + Parameters + ---------- + lambda_event: Dict[str, Any] + Lambda event + + Returns + ------- + str + Hashed representation of the data extracted by the jmespath expression + + """ + if not self.payload_validation_enabled: + return "" + data = self.validation_key_jmespath.search(lambda_event) + return self._generate_hash(data) + + def _generate_hash(self, data: Any) -> str: + """ + Generate a hash value from the provided data + + Parameters + ---------- + data: Any + The data to hash + + Returns + ------- + str + Hashed representation of the provided data + + """ + hashed_data = self.hash_function(json.dumps(data, cls=Encoder).encode()) + return hashed_data.hexdigest() + + def _validate_payload(self, lambda_event: Dict[str, Any], data_record: DataRecord) -> None: + """ + Validate that the hashed payload matches in the lambda event and stored data record + + Parameters + ---------- + lambda_event: Dict[str, Any] + Lambda event + data_record: DataRecord + DataRecord instance + + Raises + ---------- + IdempotencyValidationError + Event payload doesn't match the stored record for the given idempotency key + + """ + if self.payload_validation_enabled: + lambda_payload_hash = self._get_hashed_payload(lambda_event) + if data_record.payload_hash != lambda_payload_hash: + raise IdempotencyValidationError("Payload does not match stored record for this event key") + + def _get_expiry_timestamp(self) -> int: + """ + + Returns + ------- + int + unix timestamp of expiry date for idempotency record + + """ + now = datetime.datetime.now() + period = datetime.timedelta(seconds=self.expires_after_seconds) + return int((now + period).timestamp()) + + def _save_to_cache(self, data_record: DataRecord): + """ + Save data_record to local cache except when status is "INPROGRESS" + + NOTE: We can't cache "INPROGRESS" records as we have no way to reflect updates that can happen outside of the + execution environment + + Parameters + ---------- + data_record: DataRecord + DataRecord instance + + Returns + ------- + + """ + if not self.use_local_cache: + return + if data_record.status == STATUS_CONSTANTS["INPROGRESS"]: + return + self._cache[data_record.idempotency_key] = data_record + + def _retrieve_from_cache(self, idempotency_key: str): + if not self.use_local_cache: + return + cached_record = self._cache.get(idempotency_key) + if cached_record: + if not cached_record.is_expired: + return cached_record + logger.debug(f"Removing expired local cache record for idempotency key: {idempotency_key}") + self._delete_from_cache(idempotency_key) + + def _delete_from_cache(self, idempotency_key: str): + if not self.use_local_cache: + return + if idempotency_key in self._cache: + del self._cache[idempotency_key] + + def save_success(self, event: Dict[str, Any], result: dict) -> None: + """ + Save record of function's execution completing successfully + + Parameters + ---------- + event: Dict[str, Any] + Lambda event + result: dict + The response from lambda handler + """ + response_data = json.dumps(result, cls=Encoder) + + data_record = DataRecord( + idempotency_key=self._get_hashed_idempotency_key(event), + 
status=STATUS_CONSTANTS["COMPLETED"],
+            expiry_timestamp=self._get_expiry_timestamp(),
+            response_data=response_data,
+            payload_hash=self._get_hashed_payload(event),
+        )
+        logger.debug(
+            f"Lambda successfully executed. Saving record to persistence store with "
+            f"idempotency key: {data_record.idempotency_key}"
+        )
+        self._update_record(data_record=data_record)
+
+        self._save_to_cache(data_record)
+
+    def save_inprogress(self, event: Dict[str, Any]) -> None:
+        """
+        Save record of function's execution being in progress
+
+        Parameters
+        ----------
+        event: Dict[str, Any]
+            Lambda event
+        """
+        data_record = DataRecord(
+            idempotency_key=self._get_hashed_idempotency_key(event),
+            status=STATUS_CONSTANTS["INPROGRESS"],
+            expiry_timestamp=self._get_expiry_timestamp(),
+            payload_hash=self._get_hashed_payload(event),
+        )
+
+        logger.debug(f"Saving in progress record for idempotency key: {data_record.idempotency_key}")
+
+        if self._retrieve_from_cache(idempotency_key=data_record.idempotency_key):
+            raise IdempotencyItemAlreadyExistsError
+
+        self._put_record(data_record)
+
+    def delete_record(self, event: Dict[str, Any], exception: Exception):
+        """
+        Delete record from the persistence store
+
+        Parameters
+        ----------
+        event: Dict[str, Any]
+            Lambda event
+        exception
+            The exception raised by the lambda handler
+        """
+        data_record = DataRecord(idempotency_key=self._get_hashed_idempotency_key(event))
+
+        logger.debug(
+            f"Lambda raised an exception ({type(exception).__name__}). Clearing in progress record in persistence "
+            f"store for idempotency key: {data_record.idempotency_key}"
+        )
+        self._delete_record(data_record)
+
+        self._delete_from_cache(data_record.idempotency_key)
+
+    def get_record(self, event: Dict[str, Any]) -> DataRecord:
+        """
+        Calculate idempotency key for the event, then retrieve item from persistence store using idempotency key
+        and return it as a DataRecord instance.
+
+        Parameters
+        ----------
+        event: Dict[str, Any]
+
+        Returns
+        -------
+        DataRecord
+            DataRecord representation of existing record found in persistence store
+
+        Raises
+        ------
+        IdempotencyItemNotFoundError
+            Exception raised if no record exists in persistence store with the idempotency key
+        IdempotencyValidationError
+            Event payload doesn't match the stored record for the given idempotency key
+        """
+
+        idempotency_key = self._get_hashed_idempotency_key(event)
+
+        cached_record = self._retrieve_from_cache(idempotency_key=idempotency_key)
+        if cached_record:
+            logger.debug(f"Idempotency record found in cache with idempotency key: {idempotency_key}")
+            self._validate_payload(event, cached_record)
+            return cached_record
+
+        record = self._get_record(idempotency_key)
+
+        self._save_to_cache(data_record=record)
+
+        self._validate_payload(event, record)
+        return record
+
+    @abstractmethod
+    def _get_record(self, idempotency_key) -> DataRecord:
+        """
+        Retrieve item from persistence store using idempotency key and return it as a DataRecord instance.
+
+        Parameters
+        ----------
+        idempotency_key
+
+        Returns
+        -------
+        DataRecord
+            DataRecord representation of existing record found in persistence store
+
+        Raises
+        ------
+        IdempotencyItemNotFoundError
+            Exception raised if no record exists in persistence store with the idempotency key
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def _put_record(self, data_record: DataRecord) -> None:
+        """
+        Add a DataRecord to persistence store if it does not already exist with that key. Raise IdempotencyItemAlreadyExistsError
+        if a non-expired entry already exists.
+
+        Parameters
+        ----------
+        data_record: DataRecord
+            DataRecord instance
+        """
+
+        raise NotImplementedError
+
+    @abstractmethod
+    def _update_record(self, data_record: DataRecord) -> None:
+        """
+        Update item in persistence store
+
+        Parameters
+        ----------
+        data_record: DataRecord
+            DataRecord instance
+        """
+
+        raise NotImplementedError
+
+    @abstractmethod
+    def _delete_record(self, data_record: DataRecord) -> None:
+        """
+        Remove item from persistence store
+
+        Parameters
+        ----------
+        data_record: DataRecord
+            DataRecord instance
+        """
+
+        raise NotImplementedError
diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
new file mode 100644
index 00000000000..d87cd71ff4e
--- /dev/null
+++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py
@@ -0,0 +1,161 @@
+import datetime
+import logging
+from typing import Any, Dict, Optional
+
+import boto3
+from botocore.config import Config
+
+from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer
+from aws_lambda_powertools.utilities.idempotency.exceptions import (
+    IdempotencyItemAlreadyExistsError,
+    IdempotencyItemNotFoundError,
+)
+from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord
+
+logger = logging.getLogger(__name__)
+
+
+class DynamoDBPersistenceLayer(BasePersistenceLayer):
+    def __init__(
+        self,
+        table_name: str,
+        key_attr: str = "id",
+        expiry_attr: str = "expiration",
+        status_attr: str = "status",
+        data_attr: str = "data",
+        validation_key_attr: str = "validation",
+        boto_config: Optional[Config] = None,
+        boto3_session: Optional[boto3.session.Session] = None,
+    ):
+        """
+        Initialize the DynamoDB client
+
+        Parameters
+        ----------
+        table_name: str
+            Name of the table to use for storing execution records
+        key_attr: str, optional
+            DynamoDB attribute name for key, by default "id"
+        expiry_attr: str, optional
+            DynamoDB attribute name for expiry timestamp, by default "expiration"
+        status_attr: str, optional
+            DynamoDB attribute name for status, by default "status"
+        data_attr: str, optional
+            DynamoDB attribute name for response data, by default "data"
+        validation_key_attr: str, optional
+            DynamoDB attribute name for the hashed representation of the parts of the event used for validation, by default "validation"
+        boto_config: botocore.config.Config, optional
+            Botocore configuration to pass during client initialization
+        boto3_session : boto3.session.Session, optional
+            Boto3 session to use for AWS API communication
+
+        Examples
+        --------
+        **Create a DynamoDB persistence layer**
+
+            >>> from aws_lambda_powertools.utilities.idempotency import idempotent, DynamoDBPersistenceLayer
+            >>>
+            >>> persistence_store = DynamoDBPersistenceLayer(table_name="idempotency_store")
+            >>>
+            >>> @idempotent(persistence_store=persistence_store)
+            ... def handler(event, context):
+            ...     return {"StatusCode": 200}
+        """
+
+        boto_config = boto_config or Config()
+        session = boto3_session or boto3.session.Session()
+        self._ddb_resource = session.resource("dynamodb", config=boto_config)
+        self.table_name = table_name
+        self.table = self._ddb_resource.Table(self.table_name)
+        self.key_attr = key_attr
+        self.expiry_attr = expiry_attr
+        self.status_attr = status_attr
+        self.data_attr = data_attr
+        self.validation_key_attr = validation_key_attr
+        super(DynamoDBPersistenceLayer, self).__init__()
+
+    def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord:
+        """
+        Translate raw item records from DynamoDB to DataRecord
+ + Parameters + ---------- + item: Dict[str, Union[str, int]] + Item format from dynamodb response + + Returns + ------- + DataRecord + representation of item + + """ + return DataRecord( + idempotency_key=item[self.key_attr], + status=item[self.status_attr], + expiry_timestamp=item[self.expiry_attr], + response_data=item.get(self.data_attr), + payload_hash=item.get(self.validation_key_attr), + ) + + def _get_record(self, idempotency_key) -> DataRecord: + response = self.table.get_item(Key={self.key_attr: idempotency_key}, ConsistentRead=True) + + try: + item = response["Item"] + except KeyError: + raise IdempotencyItemNotFoundError + return self._item_to_data_record(item) + + def _put_record(self, data_record: DataRecord) -> None: + item = { + self.key_attr: data_record.idempotency_key, + self.expiry_attr: data_record.expiry_timestamp, + self.status_attr: data_record.status, + } + + if self.payload_validation_enabled: + item[self.validation_key_attr] = data_record.payload_hash + + now = datetime.datetime.now() + try: + logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}") + self.table.put_item( + Item=item, + ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now", + ExpressionAttributeValues={":now": int(now.timestamp())}, + ) + except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: + logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") + raise IdempotencyItemAlreadyExistsError + + def _update_record(self, data_record: DataRecord): + logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") + update_expression = "SET #response_data = :response_data, #expiry = :expiry, #status = :status" + expression_attr_values = { + ":expiry": data_record.expiry_timestamp, + ":response_data": data_record.response_data, + ":status": data_record.status, + } + expression_attr_names = { + "#response_data": self.data_attr, + "#expiry": self.expiry_attr, + "#status": self.status_attr, + } + + if self.payload_validation_enabled: + update_expression += ", #validation_key = :validation_key" + expression_attr_values[":validation_key"] = data_record.payload_hash + expression_attr_names["#validation_key"] = self.validation_key_attr + + kwargs = { + "Key": {self.key_attr: data_record.idempotency_key}, + "UpdateExpression": update_expression, + "ExpressionAttributeValues": expression_attr_values, + "ExpressionAttributeNames": expression_attr_names, + } + + self.table.update_item(**kwargs) + + def _delete_record(self, data_record: DataRecord) -> None: + logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") + self.table.delete_item(Key={self.key_attr: data_record.idempotency_key},) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/base.py b/aws_lambda_powertools/utilities/parser/envelopes/base.py index 484d589fcc8..06e78160d87 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/base.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/base.py @@ -59,5 +59,5 @@ def parse(...): # Generic to support type annotations throughout parser -# Note: Can't be defined under types.py due to circular dependency +# Note: Can't be defined under base.py due to circular dependency Envelope = TypeVar("Envelope", bound=BaseEnvelope) diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py index bacd25a4efa..a5c82503735 100644 --- 
a/aws_lambda_powertools/utilities/validation/base.py +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -5,8 +5,9 @@ import jmespath from jmespath.exceptions import LexerError +from aws_lambda_powertools.shared.jmespath_functions import PowertoolsFunctions + from .exceptions import InvalidEnvelopeExpressionError, InvalidSchemaFormatError, SchemaValidationError -from .jmespath_functions import PowertoolsFunctions logger = logging.getLogger(__name__) diff --git a/docs/diagram_src/idempotent_sequence.puml b/docs/diagram_src/idempotent_sequence.puml new file mode 100644 index 00000000000..76c85942796 --- /dev/null +++ b/docs/diagram_src/idempotent_sequence.puml @@ -0,0 +1,29 @@ +@startuml +'https://plantuml.com/sequence-diagram + +participant Client +participant Lambda +participant "Persistence layer" + + +group initial request +Client->Lambda:Invoke (event) +Lambda->"Persistence layer":Get or set (id=event.search(payload)) +activate "Persistence layer" +note right of "Persistence layer":Locked during this time. Prevents \nmultiple Lambda invocations with the \nsame payload running concurrently. +Lambda-->Lambda:Run Lambda handler (event) +Lambda->"Persistence layer":Update record with Lambda handler result¹ +deactivate "Persistence layer" +"Persistence layer"-->"Persistence layer": Update record with result¹ +Client x<--Lambda:Response not received by client +end + +group retried request + +Client->Lambda: Invoke (event) +Lambda->"Persistence layer":Get or set (id=event.search(payload)) +Lambda<--"Persistence layer":Already exists in persistence layer. Return result¹ +Client<--Lambda:Response sent to client +end + +@enduml diff --git a/docs/diagram_src/idempotent_sequence_exception.puml b/docs/diagram_src/idempotent_sequence_exception.puml new file mode 100644 index 00000000000..7470cdd1c4e --- /dev/null +++ b/docs/diagram_src/idempotent_sequence_exception.puml @@ -0,0 +1,18 @@ +@startuml +'https://plantuml.com/sequence-diagram + +participant Client +participant Lambda +participant "Persistence layer" + + +Client->Lambda:Invoke (event) +Lambda->"Persistence layer":Get or set (id=event.search(payload)) +activate "Persistence layer" +note right of "Persistence layer":Locked during this time. Prevents \nmultiple Lambda invocations with the \nsame payload running concurrently. +Lambda-->x Lambda:Run Lambda handler (event). Raises Exception. 
+Lambda->"Persistence layer":Delete record (id=event.search(payload))
+deactivate "Persistence layer"
+Client<--Lambda:Return error response
+
+@enduml
diff --git a/docs/index.md b/docs/index.md
index 2415a668ec1..cddf3182695 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -152,6 +152,7 @@ aws serverlessrepo list-application-versions \
 | [Validation](./utilities/validation) | JSON Schema validator for inbound events and responses
 | [Event source data classes](./utilities/data_classes) | Data classes describing the schema of common Lambda event triggers
 | [Parser](./utilities/parser) | Data parsing and deep validation using Pydantic
+| [Idempotency](./utilities/idempotency) | Idempotent Lambda handler
 
 ## Environment variables
diff --git a/docs/media/idempotent_sequence.png b/docs/media/idempotent_sequence.png
new file mode 100644
index 00000000000..92593184abb
Binary files /dev/null and b/docs/media/idempotent_sequence.png differ
diff --git a/docs/media/idempotent_sequence_exception.png b/docs/media/idempotent_sequence_exception.png
new file mode 100644
index 00000000000..4cf065993dd
Binary files /dev/null and b/docs/media/idempotent_sequence_exception.png differ
diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index aa284e7f38b..ca4606e0f40 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -5,13 +5,13 @@ description: Utility
 
 The SQS batch processing utility provides a way to handle partial failures when processing batches of messages from SQS.
 
-**Key Features**
+## Key Features
 
 * Prevent successfully processed messages being returned to SQS
 * Simple interface for individually processing messages from a batch
 * Build your own batch processor using the base classes
 
-**Background**
+## Background
 
 When using SQS as a Lambda event source mapping, Lambda functions are triggered with a batch of messages from SQS.
 
@@ -25,35 +25,76 @@ are returned to the queue.
 
 More details on how Lambda works with SQS can be found in the [AWS documentation](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html)
 
+## Getting started
 
-**IAM Permissions**
+### IAM Permissions
 
-This utility requires additional permissions to work as expected. Lambda functions using this utility require the `sqs:DeleteMessageBatch` permission.
+Before you use this utility, your AWS Lambda function must have `sqs:DeleteMessageBatch` permission to delete successful messages directly from the queue.
 
-## Processing messages from SQS
+> Example using AWS Serverless Application Model (SAM)
 
-You can use either **[sqs_batch_processor](#sqs_batch_processor-decorator)** decorator, or **[PartialSQSProcessor](#partialsqsprocessor-context-manager)** as a context manager.
+=== "template.yml"
+    ```yaml hl_lines="2-3 12-15"
+    Resources:
+      MyQueue:
+        Type: AWS::SQS::Queue
 
-They have nearly the same behaviour when it comes to processing messages from the batch:
+      HelloWorldFunction:
+        Type: AWS::Serverless::Function
+        Properties:
+          Runtime: python3.8
+          Environment:
+            Variables:
+              POWERTOOLS_SERVICE_NAME: example
+          Policies:
+            - SQSPollerPolicy:
+                QueueName:
+                  !GetAtt MyQueue.QueueName
+    ```
 
-* **Entire batch has been successfully processed**, where your Lambda handler returned successfully, we will let SQS delete the batch to optimize your cost
-* **Entire Batch has been partially processed successfully**, where exceptions were raised within your `record handler`, we will:
-    - **1)** Delete successfully processed messages from the queue by directly calling `sqs:DeleteMessageBatch`
-    - **2)** Raise `SQSBatchProcessingError` to ensure failed messages return to your SQS queue
+### Processing messages from SQS
 
-The only difference is that **PartialSQSProcessor** will give you access to processed messages if you need.
+You can use either **[sqs_batch_processor](#sqs_batch_processor-decorator)** decorator, or **[PartialSQSProcessor](#partialsqsprocessor-context-manager)** as a context manager if you'd like access to the processed results.
 
-## Record Handler
+You need to create a function to handle each record from the batch - we call it `record_handler` from here on.
 
-Both decorator and context managers require an explicit function to process the batch of messages - namely `record_handler` parameter.
+=== "Decorator"
 
-This function is responsible for processing each individual message from the batch, and to raise an exception if unable to process any of the messages sent.
+    ```python hl_lines="3 6"
+    from aws_lambda_powertools.utilities.batch import sqs_batch_processor
 
-**Any non-exception/successful return from your record handler function** will instruct both decorator and context manager to queue up each individual message for deletion.
+    def record_handler(record):
+        return do_something_with(record["body"])
 
-### sqs_batch_processor decorator
+    @sqs_batch_processor(record_handler=record_handler)
+    def lambda_handler(event, context):
+        return {"statusCode": 200}
+    ```
+=== "Context manager"
 
-When using this decorator, you need provide a function via `record_handler` param that will process individual messages from the batch - It should raise an exception if it is unable to process the record.
+    ```python hl_lines="3 9 11-12"
+    from aws_lambda_powertools.utilities.batch import PartialSQSProcessor
+
+    def record_handler(record):
+        return_value = do_something_with(record["body"])
+        return return_value
+
+    def lambda_handler(event, context):
+        records = event["Records"]
+        processor = PartialSQSProcessor()
+
+        with processor(records, record_handler) as proc:
+            result = proc.process()  # Returns a list of all results from record_handler
+
+        return result
+    ```
+
+!!! tip
+    **Any non-exception/successful return from your record handler function** will instruct both decorator and context manager to queue up each individual message for deletion.
+
+    If the entire batch succeeds, we let Lambda proceed in deleting the records from the queue for cost reasons.
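+
+    Conversely, raising an exception from your `record_handler` is how you mark a record as failed. As a minimal sketch (assuming a hypothetical `is_valid` validation helper), it could look like this:
+
+    ```python
+    def record_handler(record):
+        body = record["body"]
+        if not is_valid(body):  # is_valid is a hypothetical validation helper
+            raise ValueError(f"Unable to process record: {body}")
+        return do_something_with(body)
+    ```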
+
+### Partial failure mechanics
 
 All records in the batch will be passed to this handler for processing, even if exceptions are thrown - Here's the behaviour after completing the batch:
 
@@ -61,29 +102,26 @@ All records in the batch will be passed to this handler for processing, even if
 * **Any unprocessed messages detected**, we will raise `SQSBatchProcessingError` to ensure failed messages return to your SQS queue
 
 !!! warning
-    You will not have accessed to the processed messages within the Lambda Handler - all processing logic will and should be performed by the record_handler function.
+    You will not have access to the **processed messages** within the Lambda Handler.
 
-=== "app.py"
+    All processing logic should be performed by the `record_handler` function.
 
-    ```python
-    from aws_lambda_powertools.utilities.batch import sqs_batch_processor
+## Advanced
 
-    def record_handler(record):
-        # This will be called for each individual message from a batch
-        # It should raise an exception if the message was not processed successfully
-        return_value = do_something_with(record["body"])
-        return return_value
+### Choosing between decorator and context manager
 
-    @sqs_batch_processor(record_handler=record_handler)
-    def lambda_handler(event, context):
-        return {"statusCode": 200}
-    ```
+They have nearly the same behaviour when it comes to processing messages from the batch:
 
-### PartialSQSProcessor context manager
+* **Entire batch has been successfully processed**, where your Lambda handler returned successfully, we will let SQS delete the batch to optimize your cost
+* **Entire batch has been partially processed successfully**, where exceptions were raised within your `record handler`, we will:
+    - **1)** Delete successfully processed messages from the queue by directly calling `sqs:DeleteMessageBatch`
+    - **2)** Raise `SQSBatchProcessingError` to ensure failed messages return to your SQS queue
+
+The only difference is that **PartialSQSProcessor** will give you access to processed messages if you need them.
 
-If you require access to the result of processed messages, you can use this context manager.
+### Accessing processed messages
 
-The result from calling `process()` on the context manager will be a list of all the return values from your `record_handler` function.
+Use the `PartialSQSProcessor` context manager to access a list of all return values from your `record_handler` function.
=== "app.py" @@ -91,11 +129,7 @@ The result from calling `process()` on the context manager will be a list of all from aws_lambda_powertools.utilities.batch import PartialSQSProcessor def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - + return do_something_with(record["body"]) def lambda_handler(event, context): records = event["Records"] @@ -108,7 +142,7 @@ The result from calling `process()` on the context manager will be a list of all return result ``` -## Passing custom boto3 config +### Passing custom boto3 config If you need to pass custom configuration such as region to the SDK, you can pass your own [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) to the `sqs_batch_processor` decorator: @@ -159,14 +193,15 @@ the `sqs_batch_processor` decorator: ``` -## Suppressing exceptions +### Suppressing exceptions If you want to disable the default behavior where `SQSBatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. === "Decorator" - ```python hl_lines="2" - ... + ```python hl_lines="3" + from aws_lambda_powertools.utilities.batch import sqs_batch_processor + @sqs_batch_processor(record_handler=record_handler, config=config, suppress_exception=True) def lambda_handler(event, context): return {"statusCode": 200} @@ -174,15 +209,16 @@ If you want to disable the default behavior where `SQSBatchProcessingError` is r === "Context manager" - ```python hl_lines="2" - ... + ```python hl_lines="3" + from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + processor = PartialSQSProcessor(config=config, suppress_exception=True) with processor(records, record_handler): result = processor.process() ``` -## Create your own partial processor +### Create your own partial processor You can create your own partial batch processor by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()` and `_process_record()`. @@ -192,11 +228,9 @@ You can create your own partial batch processor by inheriting the `BasePartialPr You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function. 
-**Example:**
-
 === "custom_processor.py"
 
-    ```python
+    ```python hl_lines="3 9 24 30 37 57"
     from random import randint
 
     from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor
@@ -223,14 +257,12 @@ You can then use this class as a context manager, or pass it to `batch_processor
     def _prepare(self):
         # It's called once, *before* processing
         # Creates table resource and clean previous results
-        # E.g.:
         self.ddb_table = boto3.resource("dynamodb").Table(self.table_name)
         self.success_messages.clear()
 
     def _clean(self):
         # It's called once, *after* closing processing all records (closing the context manager)
         # Here we're sending, at once, all successful messages to a ddb table
-        # E.g.:
         with ddb_table.batch_writer() as batch:
             for result in self.success_messages:
                 batch.put_item(Item=result)
@@ -239,7 +271,6 @@ You can then use this class as a context manager, or pass it to `batch_processor
         # It handles how your record is processed
         # Here we're keeping the status of each run
         # where self.handler is the record_handler function passed as an argument
-        # E.g.:
         try:
             result = self.handler(record)  # record_handler passed to decorator/context manager
             return self.success_handler(record, result)
@@ -260,3 +291,24 @@ You can then use this class as a context manager, or pass it to `batch_processor
     def lambda_handler(event, context):
         return {"statusCode": 200}
     ```
+
+### Integrating exception handling with Sentry.io
+
+When using Sentry.io for error monitoring, you can override `failure_handler` to capture each processing exception:
+
+> Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732)
+
+=== "sentry_integration.py"
+
+    ```python hl_lines="5 10-11"
+    import logging
+    from typing import Tuple
+
+    from aws_lambda_powertools.utilities.batch import PartialSQSProcessor
+    from sentry_sdk import capture_exception
+
+    logger = logging.getLogger(__name__)
+
+    class SQSProcessor(PartialSQSProcessor):
+        def failure_handler(self, record: dict, exception: Tuple) -> Tuple:  # type: ignore
+            capture_exception()  # send exception to Sentry
+            logger.exception("got exception while processing SQS message")
+            return super().failure_handler(record, exception)  # type: ignore
+    ```
diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md
index 464942995fb..d03763ab574 100644
--- a/docs/utilities/data_classes.md
+++ b/docs/utilities/data_classes.md
@@ -5,7 +5,7 @@ description: Utility
 
 The event source data classes utility provides classes describing the schema of common Lambda events triggers.
 
-**Key Features**
+## Key Features
 
 * Type hinting and code completion for common event types
 * Helper functions for decoding/deserializing nested fields
@@ -17,13 +17,30 @@ When authoring Lambda functions, you often need to understand the schema of the
 handler. There are several common event types which follow a specific schema, depending on the service triggering the
 Lambda function.
 
+## Getting started
 
-## Utilizing the data classes
+### Utilizing the data classes
 
 The classes are initialized by passing in the Lambda event object into the constructor of the appropriate data class.
+
 For example, if your Lambda function is being triggered by an API Gateway proxy integration, you can use the
 `APIGatewayProxyEvent` class.
+=== "app.py"
+
+    ```python hl_lines="1 4"
+    from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent
+
+    def lambda_handler(event, context):
+        event: APIGatewayProxyEvent = APIGatewayProxyEvent(event)
+
+        if 'helloworld' in event.path and event.http_method == 'GET':
+            do_something_with(event.body)
+    ```
+
+**Autocomplete with self-documented properties and methods**
+
 
 ![Utilities Data Classes](../media/utilities_data_classes.png)
 
@@ -49,7 +66,7 @@ Event Source | Data_class
 documentation inherently (via autocompletion, types and docstrings).
 
 
-## API Gateway Proxy
+### API Gateway Proxy
 
 Typically used for API Gateway REST API or HTTP API using v1 proxy event.
 
@@ -68,7 +85,7 @@ Typically used for API Gateway REST API or HTTP API using v1 proxy event.
         do_something_with(event.body, user)
     ```
 
-## API Gateway Proxy v2
+### API Gateway Proxy v2
 
 === "lambda_app.py"
 
@@ -84,7 +101,7 @@ Typically used for API Gateway REST API or HTTP API using v1 proxy event.
         do_something_with(event.body, query_string_parameters)
     ```
 
-## CloudWatch Logs
+### CloudWatch Logs
 
 CloudWatch Logs events by default are compressed and base64 encoded. You can use the helper function provided to decode,
 decompress and parse json data from the event.
 
@@ -103,7 +120,7 @@ decompress and parse json data from the event.
         do_something_with(event.timestamp, event.message)
     ```
 
-## Cognito User Pool
+### Cognito User Pool
 
 Cognito User Pools have several [different Lambda trigger sources](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html#cognito-user-identity-pools-working-with-aws-lambda-trigger-sources), all of which map to a different data class, which
 can be imported from `aws_lambda_powertools.data_classes.cognito_user_pool_event`:
 
@@ -133,7 +150,7 @@ Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChalleng
         do_something_with(user_attributes)
     ```
 
-## DynamoDB Streams
+### DynamoDB Streams
 
 The DynamoDB data class utility provides the base class for `DynamoDBStreamEvent`, a typed class for
 attributes values (`AttributeValue`), as well as enums for stream view type (`StreamViewType`) and event type
@@ -142,7 +159,7 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St
 
 === "lambda_app.py"
 
     ```python
-    from aws_lambda_powertools.utilities.data_classes import DynamoDBStreamEvent, DynamoDBRecordEventName
+    from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBStreamEvent, DynamoDBRecordEventName
 
     def lambda_handler(event, context):
         event: DynamoDBStreamEvent = DynamoDBStreamEvent(event)
@@ -154,7 +171,7 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St
             do_something_with(record.dynamodb.old_image)
     ```
 
-## EventBridge
+### EventBridge
 
 === "lambda_app.py"
 
@@ -167,7 +184,7 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St
     ```
 
-## Kinesis streams
+### Kinesis streams
 
 Kinesis events by default contain base64 encoded data. You can use the helper function to access the data either as json
 or plain text, depending on the original payload.
 
@@ -189,7 +206,7 @@ or plain text, depending on the original payload.
         do_something_with(data)
     ```
 
-## S3
+### S3
 
 === "lambda_app.py"
 
@@ -207,7 +224,7 @@ or plain text, depending on the original payload.
         do_something_with(f'{bucket_name}/{object_key}')
     ```
 
-## SES
+### SES
 
 === "lambda_app.py"
 
@@ -225,7 +242,7 @@ or plain text, depending on the original payload.
do_something_with(common_headers.to, common_headers.subject) ``` -## SNS +### SNS === "lambda_app.py" @@ -243,7 +260,7 @@ or plain text, depending on the original payload. do_something_with(subject, message) ``` -## SQS +### SQS === "lambda_app.py" @@ -257,3 +274,25 @@ or plain text, depending on the original payload. for record in event.records: do_something_with(record.body) ``` + +### Connect + +**Connect Contact Flow** + +=== "lambda_app.py" + + ```python + from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( + ConnectContactFlowChannel, + ConnectContactFlowEndpointType, + ConnectContactFlowEvent, + ConnectContactFlowInitiationMethod, + ) + + def lambda_handler(event, context): + event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) + assert event.contact_data.attributes == {"Language": "en-US"} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API + ``` diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md new file mode 100644 index 00000000000..a850a8cfa50 --- /dev/null +++ b/docs/utilities/idempotency.md @@ -0,0 +1,685 @@ +--- +title: Idempotency +description: Utility +--- + +!!! attention + **This utility is currently in beta**. Please open an [issue in GitHub](https://github.com/awslabs/aws-lambda-powertools-python/issues/new/choose) for any bugs or feature requests. + +The idempotency utility provides a simple solution to convert your Lambda functions into idempotent operations which +are safe to retry. + +## Terminology + +The property of idempotency means that an operation does not cause additional side effects if it is called more than +once with the same input parameters. + +**Idempotent operations will return the same result when they are called multiple +times with the same parameters**. This makes idempotent operations safe to retry. + +**Idempotency key** is a hash representation of either the entire event or a specific configured subset of the event, and invocation results are **JSON serialized** and stored in your persistence storage layer. + +## Key features + +* Prevent Lambda handler from executing more than once on the same event payload during a time window +* Ensure Lambda handler returns the same result when called with the same payload +* Select a subset of the event as the idempotency key using JMESPath expressions +* Set a time window in which records with the same payload should be considered duplicates + +## Getting started + +### Required resources + +Before getting started, you need to create a persistent storage layer where the idempotency utility can store its +state - your lambda functions will need read and write access to it. + +As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. + +> Example using AWS Serverless Application Model (SAM) + +=== "template.yml" + + ```yaml hl_lines="5-13 21-23" + Resources: + IdempotencyTable: + Type: AWS::DynamoDB::Table + Properties: + AttributeDefinitions: + - AttributeName: id + AttributeType: S + KeySchema: + - AttributeName: id + KeyType: HASH + TimeToLiveSpecification: + AttributeName: expiration + Enabled: true + BillingMode: PAY_PER_REQUEST + + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Runtime: python3.8 + ... 
+            Policies:
+              - DynamoDBCrudPolicy:
+                  TableName: !Ref IdempotencyTable
+    ```
+
+!!! warning "Large responses with DynamoDB persistence layer"
+    When using this utility with DynamoDB, your function's responses must be [smaller than 400KB](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html#limits-items).
+
+    Larger items cannot be written to DynamoDB and will cause exceptions.
+
+!!! info "DynamoDB"
+    Each function invocation will generally make 2 requests to DynamoDB. If the
+    result returned by your Lambda is less than 1KB, you can expect 2 WCUs per invocation. For retried invocations, you will
+    see 1 WCU and 1 RCU. Review the [DynamoDB pricing documentation](https://aws.amazon.com/dynamodb/pricing/) to
+    estimate the cost.
+
+### Idempotent decorator
+
+You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler.
+
+=== "app.py"
+
+    ```python hl_lines="1 5 7 14"
+    from aws_lambda_powertools.utilities.idempotency import (
+        DynamoDBPersistenceLayer, idempotent
+    )
+
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+
+    @idempotent(persistence_store=persistence_layer)
+    def handler(event, context):
+        payment = create_subscription_payment(
+            user=event['user'],
+            product=event['product_id']
+        )
+        ...
+        return {
+            "payment_id": payment.id,
+            "message": "success",
+            "statusCode": 200,
+        }
+    ```
+
+=== "Example event"
+
+    ```json
+    {
+      "username": "xyz",
+      "product_id": "123456789"
+    }
+    ```
+
+#### Choosing a payload subset for idempotency
+
+!!! tip "Dealing with always changing payloads"
+    When dealing with a more elaborate payload, where parts of the payload always change, you should use the **`event_key_jmespath`** parameter.
+
+Use [`IdempotencyConfig`](#customizing-the-default-behavior) to instruct the idempotent decorator to only use a portion of your payload to verify whether a request is idempotent, and therefore it should not be retried.
+
+> **Payment scenario**
+
+In this example, we have a Lambda handler that creates a payment for a user subscribing to a product. We want to ensure that we don't accidentally charge our customer by subscribing them more than once.
+
+Imagine the function executes successfully, but the client never receives the response due to a connection issue. It is safe to retry in this instance, as the idempotent decorator will return a previously saved response.
+
+=== "payment.py"
+
+    ```python hl_lines="2-4 10 12 15 20"
+    import json
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+
+    # Treat everything under the "body" key
+    # in the event json object as our payload
+    config = IdempotencyConfig(event_key_jmespath="body")
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        body = json.loads(event['body'])
+        payment = create_subscription_payment(
+            user=body['user'],
+            product=body['product_id']
+        )
+        ...
+        return {
+            "payment_id": payment.id,
+            "message": "success",
+            "statusCode": 200
+        }
+    ```
+
+=== "Example event"
+
+    ```json hl_lines="28"
+    {
+      "version":"2.0",
+      "routeKey":"ANY /createpayment",
+      "rawPath":"/createpayment",
+      "rawQueryString":"",
+      "headers": {
+        "Header1": "value1",
+        "Header2": "value2"
+      },
+      "requestContext":{
+        "accountId":"123456789012",
+        "apiId":"api-id",
+        "domainName":"id.execute-api.us-east-1.amazonaws.com",
+        "domainPrefix":"id",
+        "http":{
+          "method":"POST",
+          "path":"/createpayment",
+          "protocol":"HTTP/1.1",
+          "sourceIp":"ip",
+          "userAgent":"agent"
+        },
+        "requestId":"id",
+        "routeKey":"ANY /createpayment",
+        "stage":"$default",
+        "time":"10/Feb/2021:13:40:43 +0000",
+        "timeEpoch":1612964443723
+      },
+      "body":"{\"username\":\"xyz\",\"product_id\":\"123456789\"}",
+      "isBase64Encoded":false
+    }
+    ```
+
+#### Idempotency request flow
+
+This sequence diagram shows an example flow of what happens in the payment scenario:
+
+![Idempotent sequence](../media/idempotent_sequence.png)
+
+The client was successful in receiving the result after the retry. Since the Lambda handler was only executed once, our customer hasn't been charged twice.
+
+!!! note
+    Bear in mind that the entire Lambda handler is treated as a single idempotent operation. If your Lambda handler can cause multiple side effects, consider splitting it into separate functions.
+
+### Handling exceptions
+
+**The record in the persistence layer will be deleted** if your Lambda handler raises an exception. This means that new invocations will execute again despite having the same payload.
+
+If you don't want the record to be deleted, you need to catch exceptions within the handler and return a successful response.
+
+![Idempotent sequence exception](../media/idempotent_sequence_exception.png)
+
+!!! warning
+    **We will raise `IdempotencyPersistenceLayerError`** if any of the calls to the persistence layer fail unexpectedly.
+
+    As this happens outside the scope of your Lambda handler, you are not going to be able to catch it.
+
+### Persistence layers
+
+#### DynamoDBPersistenceLayer
+
+This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended).
+
+=== "app.py"

+    ```python hl_lines="3-7"
+    from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer
+
+    persistence_layer = DynamoDBPersistenceLayer(
+        table_name="IdempotencyTable",
+        key_attr="idempotency_key",
+        expiry_attr="expires_at",
+        status_attr="current_status",
+        data_attr="result_data",
+        validation_key_attr="validation_key"
+    )
+    ```
+
+These are knobs you can use when using DynamoDB as a persistence layer:
+
+Parameter | Required | Default | Description
+------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | ---------------------------------------------------------------------------------
+**table_name** | :heavy_check_mark: | | Table name to store state
+**key_attr** | | `id` | Primary key of the table. Hashed representation of the payload
+**expiry_attr** | | `expiration` | Unix timestamp of when record expires
+**status_attr** | | `status` | Stores status of the lambda execution during and after invocation
+**data_attr** | | `data` | Stores results of successfully executed Lambda handlers
+**validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation
+
+## Advanced
+
+### Customizing the default behavior
+
+The idempotent decorator can be further configured with **`IdempotencyConfig`** as seen in the previous example. These are the available options for further configuration:
+
+Parameter | Default | Description
+------------------------------------------------- | ------------------------------------------------- | ---------------------------------------------------------------------------------
+**event_key_jmespath** | `""` | JMESPath expression to extract the idempotency key from the event record
+**payload_validation_jmespath** | `""` | JMESPath expression to validate whether certain parameters have changed in the event
+**raise_on_no_idempotency_key** | `False` | Raise exception if no idempotency key was found in the request
+**expires_after_seconds** | 3600 | The number of seconds to wait before a record is expired
+**use_local_cache** | `False` | Whether to locally cache idempotency results
+**local_cache_max_items** | 256 | Max number of items to store in local cache
+**hash_function** | `md5` | Function to use for calculating hashes, as provided by [hashlib](https://docs.python.org/3/library/hashlib.html) in the standard library.
+
+### Handling concurrent executions with the same payload
+
+This utility will raise an **`IdempotencyAlreadyInProgressError`** exception if you receive **multiple invocations with the same payload while the first invocation hasn't completed yet**.
+
+!!! info "If you receive `IdempotencyAlreadyInProgressError`, you can safely retry the operation."
+
+This is a locking mechanism for correctness. Since we don't know the result from the first invocation yet, we can't safely allow another concurrent execution.
+
+### Using in-memory cache
+
+**By default, in-memory local caching is disabled**, since we don't know how much memory you consume per invocation compared to the maximum configured in your Lambda function.
+
+!!! note "This in-memory cache is local to each Lambda execution environment"
+    This means it will be effective in cases where your function's concurrency is low in comparison to the number of "retry" invocations with the same payload, because cache might be empty.
+
+You can enable in-memory caching with the **`use_local_cache`** parameter:
+
+=== "app.py"
+
+    ```python hl_lines="6 8 11"
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+    config = IdempotencyConfig(
+        event_key_jmespath="body",
+        expires_after_seconds=5*60,  # 5 minutes
+        use_local_cache=True
+    )
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        ...
+    ```
+
+When enabled, the default is to cache a maximum of 256 records in each Lambda execution environment - You can change it with the **`local_cache_max_items`** parameter.
+
+### Expiring idempotency records
+
+!!! note
+    By default, we expire idempotency records after **an hour** (3600 seconds).
+
+In most cases, it is not desirable to store the idempotency records forever. Rather, you want to guarantee that the same payload won't be executed within a period of time.
+
+You can change this window with the **`expires_after_seconds`** parameter:
+
+=== "app.py"
+
+    ```python hl_lines="6 8 11"
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+    config = IdempotencyConfig(
+        event_key_jmespath="body",
+        expires_after_seconds=5*60,  # 5 minutes
+    )
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        ...
+    ```
+
+This will mark any records older than 5 minutes as expired, and the lambda handler will be executed as normal if it is invoked with a matching payload.
+
+!!! note "DynamoDB time-to-live field"
+    This utility uses **`expiration`** as the TTL field in DynamoDB, as [demonstrated in the SAM example earlier](#required-resources).
+
+### Payload validation
+
+!!! question "What if your function is invoked with the same payload except some outer parameters have changed?"
+    Example: A payment transaction for a given productID was requested twice for the same customer, **however the amount to be paid has changed in the second transaction**.
+
+By default, we would return the same result as the initial request; in this instance, that may be misleading. We provide fail-fast payload validation to address this edge case.
+
+With **`payload_validation_jmespath`**, you can provide an additional JMESPath expression to specify which part of the event body should be validated against previous idempotent invocations.
+
+=== "app.py"
+
+    ```python hl_lines="7 11 18 25"
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    config = IdempotencyConfig(
+        event_key_jmespath="[userDetail, productId]",
+        payload_validation_jmespath="amount"
+    )
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        # Creating a subscription payment is a side
+        # effect of calling this function!
+        payment = create_subscription_payment(
+            user=event['userDetail']['username'],
+            product=event['productId'],
+            amount=event['amount']
+        )
+        ...
+        return {
+            "message": "success",
+            "statusCode": 200,
+            "payment_id": payment.id,
+            "amount": payment.amount
+        }
+    ```
+
+=== "Example Event 1"
+
+    ```json hl_lines="8"
+    {
+        "userDetail": {
+            "username": "User1",
+            "user_email": "user@example.com"
+        },
+        "productId": 1500,
+        "charge_type": "subscription",
+        "amount": 500
+    }
+    ```
+
+=== "Example Event 2"
+
+    ```json hl_lines="8"
+    {
+        "userDetail": {
+            "username": "User1",
+            "user_email": "user@example.com"
+        },
+        "productId": 1500,
+        "charge_type": "subscription",
+        "amount": 1
+    }
+    ```
+
+In this example, the **`userDetail`** and **`productId`** keys are used as the payload to generate the idempotency key, as per the **`event_key_jmespath`** parameter.
+
+!!! note
+    If we try to send the same request but with a different amount, we will raise **`IdempotencyValidationError`**.
+
+Without payload validation, we would have returned the same result as we did for the initial request. Since we're also returning an amount in the response, this could be quite confusing for the client.
+
+By using **`payload_validation_jmespath="amount"`**, we prevent this potentially confusing behavior and instead raise an Exception.
+
+### Making idempotency key required
+
+If you want to enforce that an idempotency key is required, you can set **`raise_on_no_idempotency_key`** to `True`.
+
+This means that we will raise **`IdempotencyKeyError`** if the evaluation of **`event_key_jmespath`** is `None`.
+
+=== "app.py"
+
+    ```python hl_lines="9-10 13"
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")
+
+    # Requires "user"."uid" and "order_id" to be present
+    config = IdempotencyConfig(
+        event_key_jmespath="[user.uid, order_id]",
+        raise_on_no_idempotency_key=True,
+    )
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        pass
+    ```
+
+=== "Success Event"
+
+    ```json hl_lines="3 6"
+    {
+        "user": {
+            "uid": "BB0D045C-8878-40C8-889E-38B3CB0A61B1",
+            "name": "Foo"
+        },
+        "order_id": 10000
+    }
+    ```
+
+=== "Failure Event"
+
+    Notice that `order_id` is now accidentally within the `user` key
+
+    ```json hl_lines="3 5"
+    {
+        "user": {
+            "uid": "DE0D000E-1234-10D1-991E-EAC1DD1D52C8",
+            "name": "Joe Bloggs",
+            "order_id": 10000
+        }
+    }
+    ```
+
+### Customizing boto configuration
+
+You can provide a custom boto configuration via the **`boto_config`** parameter, or an existing boto session via the **`boto3_session`** parameter, when constructing the persistence store.
+
+=== "Custom session"
+
+    ```python hl_lines="1 6 9 14"
+    import boto3
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    boto3_session = boto3.session.Session()
+    persistence_layer = DynamoDBPersistenceLayer(
+        table_name="IdempotencyTable",
+        boto3_session=boto3_session
+    )
+
+    config = IdempotencyConfig(event_key_jmespath="body")
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        ...
+    ```
+
+=== "Custom config"
+
+    ```python hl_lines="1 7 10"
+    from botocore.config import Config
+    from aws_lambda_powertools.utilities.idempotency import (
+        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
+    )
+
+    config = IdempotencyConfig(event_key_jmespath="body")
+    boto_config = Config()
+    persistence_layer = DynamoDBPersistenceLayer(
+        table_name="IdempotencyTable",
+        boto_config=boto_config
+    )
+
+    @idempotent(config=config, persistence_store=persistence_layer)
+    def handler(event, context):
+        ...
+    ```
+
+### Bring your own persistent store
+
+This utility provides an abstract base class (ABC), so that you can implement your choice of persistent storage layer.
+
+You can inherit from the `BasePersistenceLayer` class and implement the abstract methods `_get_record`, `_put_record`,
+`_update_record` and `_delete_record`.
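+
+As a minimal illustration of that contract, here is an **in-memory sketch**. It is illustrative only: state kept in memory is lost between cold starts and is not shared across concurrent execution environments, so it cannot provide real idempotency guarantees.
+
+=== "in_memory_sketch.py"
+
+    ```python
+    from typing import Dict
+
+    from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer
+    from aws_lambda_powertools.utilities.idempotency.exceptions import (
+        IdempotencyItemAlreadyExistsError,
+        IdempotencyItemNotFoundError,
+    )
+    from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord
+
+
+    class InMemoryPersistenceLayer(BasePersistenceLayer):
+        def __init__(self):
+            # Only lives as long as this execution environment - not a real persistence store
+            self._records: Dict[str, DataRecord] = {}
+            super().__init__()
+
+        def _get_record(self, idempotency_key) -> DataRecord:
+            try:
+                return self._records[idempotency_key]
+            except KeyError:
+                raise IdempotencyItemNotFoundError
+
+        def _put_record(self, data_record: DataRecord) -> None:
+            # Reject only non-expired duplicates, mirroring the contract described above
+            existing = self._records.get(data_record.idempotency_key)
+            if existing and not existing.is_expired:
+                raise IdempotencyItemAlreadyExistsError
+            self._records[data_record.idempotency_key] = data_record
+
+        def _update_record(self, data_record: DataRecord) -> None:
+            self._records[data_record.idempotency_key] = data_record
+
+        def _delete_record(self, data_record: DataRecord) -> None:
+            self._records.pop(data_record.idempotency_key, None)
+    ```
+
+The DynamoDB implementation shipped with this utility follows the same contract: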

=== "DynamoDB persistence layer implementation excerpt"

    ```python hl_lines="8-13 57 65 74 96 124"
    import datetime
    import logging
    from typing import Any, Dict, Optional

    import boto3
    from botocore.config import Config

    from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer
    from aws_lambda_powertools.utilities.idempotency.exceptions import (
        IdempotencyItemAlreadyExistsError,
        IdempotencyItemNotFoundError,
    )
    from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord

    logger = logging.getLogger(__name__)


    class DynamoDBPersistenceLayer(BasePersistenceLayer):
        def __init__(
            self,
            table_name: str,
            key_attr: str = "id",
            expiry_attr: str = "expiration",
            status_attr: str = "status",
            data_attr: str = "data",
            validation_key_attr: str = "validation",
            boto_config: Optional[Config] = None,
            boto3_session: Optional[boto3.session.Session] = None,
        ):
            boto_config = boto_config or Config()
            session = boto3_session or boto3.session.Session()
            self._ddb_resource = session.resource("dynamodb", config=boto_config)
            self.table_name = table_name
            self.table = self._ddb_resource.Table(self.table_name)
            self.key_attr = key_attr
            self.expiry_attr = expiry_attr
            self.status_attr = status_attr
            self.data_attr = data_attr
            self.validation_key_attr = validation_key_attr
            super(DynamoDBPersistenceLayer, self).__init__()

        def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord:
            """
            Translate raw item records from DynamoDB to DataRecord

            Parameters
            ----------
            item: Dict[str, Any]
                Item format from dynamodb response

            Returns
            -------
            DataRecord
                representation of item

            """
            return DataRecord(
                idempotency_key=item[self.key_attr],
                status=item[self.status_attr],
                expiry_timestamp=item[self.expiry_attr],
                response_data=item.get(self.data_attr),
                payload_hash=item.get(self.validation_key_attr),
            )

        def _get_record(self, idempotency_key) -> DataRecord:
            response = self.table.get_item(Key={self.key_attr: idempotency_key}, ConsistentRead=True)

            try:
                item = response["Item"]
            except KeyError:
                raise IdempotencyItemNotFoundError
            return self._item_to_data_record(item)

        def _put_record(self, data_record: DataRecord) -> None:
            item = {
                self.key_attr: data_record.idempotency_key,
                self.expiry_attr: data_record.expiry_timestamp,
                self.status_attr: data_record.status,
            }

            if self.payload_validation_enabled:
                item[self.validation_key_attr] = data_record.payload_hash

            now = datetime.datetime.now()
            try:
                logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}")
                self.table.put_item(
                    Item=item,
                    ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now",
                    ExpressionAttributeValues={":now": int(now.timestamp())},
                )
            except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException:
                logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}")
                raise IdempotencyItemAlreadyExistsError

        def _update_record(self, data_record: DataRecord):
            logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}")
            update_expression = "SET #response_data = :response_data, #expiry = :expiry, #status = :status"
            expression_attr_values = {
                ":expiry": data_record.expiry_timestamp,
                ":response_data": data_record.response_data,
                ":status": data_record.status,
            }
            expression_attr_names = {
                "#response_data": self.data_attr,
                "#expiry": self.expiry_attr,
                "#status": self.status_attr,
            }

            if self.payload_validation_enabled:
                update_expression += ", #validation_key = :validation_key"
                expression_attr_values[":validation_key"] = data_record.payload_hash
                expression_attr_names["#validation_key"] = self.validation_key_attr

            kwargs = {
                "Key": {self.key_attr: data_record.idempotency_key},
                "UpdateExpression": update_expression,
                "ExpressionAttributeValues": expression_attr_values,
                "ExpressionAttributeNames": expression_attr_names,
            }

            self.table.update_item(**kwargs)

        def _delete_record(self, data_record: DataRecord) -> None:
            logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}")
            self.table.delete_item(Key={self.key_attr: data_record.idempotency_key})
    ```

!!! danger
    Pay attention to the documentation for each method: you may need to perform additional checks inside these methods to ensure the idempotency guarantees remain intact.

    For example, the `_put_record` method needs to raise an exception if a non-expired record already exists in the data store with a matching key.

## Compatibility with other utilities

### Validation utility

The idempotency utility can be used with the `validator` decorator. Ensure that idempotency is the innermost decorator.

!!! warning
    If you use an envelope with the validator, the event received by the idempotency utility will be the unwrapped
    event, not the "raw" event Lambda was invoked with. You will need to account for this if you set the
    `event_key_jmespath`.

=== "app.py"

    ```python hl_lines="9 10"
    from aws_lambda_powertools.utilities.validation import validator, envelopes
    from aws_lambda_powertools.utilities.idempotency import (
        IdempotencyConfig, DynamoDBPersistenceLayer, idempotent
    )

    config = IdempotencyConfig(event_key_jmespath="[message, username]")
    persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable")

    @validator(envelope=envelopes.API_GATEWAY_HTTP)
    @idempotent(config=config, persistence_store=persistence_layer)
    def lambda_handler(event, context):
        cause_some_side_effects(event['username'])
        return {"message": event['message'], "statusCode": 200}
    ```

!!! tip "JMESPath Powertools functions are also available"
    Built-in functions available in the validation utility, such as `powertools_json`, `powertools_base64`, and `powertools_base64_gzip`, can also be used in this utility.

## Extra resources

If you're interested in a deep dive on how Amazon uses idempotency when building its APIs, check out
[this article](https://aws.amazon.com/builders-library/making-retries-safe-with-idempotent-APIs/).
diff --git a/examples/__init__.py b/examples/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/mkdocs.yml b/mkdocs.yml
index 0298a4864a6..d8d37830369 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -17,6 +17,7 @@ nav:
    - utilities/validation.md
    - utilities/data_classes.md
    - utilities/parser.md
+    - utilities/idempotency.md

theme:
  name: material
@@ -46,6 +47,7 @@ markdown_extensions:
      permalink: true
      toc_depth: 4
  - attr_list
+  - pymdownx.emoji

copyright: Copyright © 2021 Amazon Web Services

diff --git a/poetry.lock b/poetry.lock
index dadec35e65d..da93eb54a0c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -44,16 +44,16 @@ wrapt = "*"

[[package]]
name = "bandit"
-version = "1.6.2"
+version = "1.7.0"
description = "Security oriented static analyser for python code."
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" -PyYAML = ">=3.13" +PyYAML = ">=5.3.1" six = ">=1.10.0" stevedore = ">=1.20.0" @@ -79,45 +79,29 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.16.23" +version = "1.17.20" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = "*" +python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] -botocore = ">=1.19.23,<1.20.0" +botocore = ">=1.20.20,<1.21.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.3.0,<0.4.0" [[package]] name = "botocore" -version = "1.19.23" +version = "1.20.20" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = "*" +python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] jmespath = ">=0.7.1,<1.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = {version = ">=1.25.4,<1.27", markers = "python_version != \"3.4\""} - -[[package]] -name = "certifi" -version = "2020.11.8" -description = "Python package for providing Mozilla's CA Bundle." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "chardet" -version = "3.0.4" -description = "Universal encoding detector for Python 2 and 3" -category = "dev" -optional = false -python-versions = "*" +urllib3 = ">=1.25.4,<1.27" [[package]] name = "click" @@ -137,7 +121,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "5.3" +version = "5.5" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -159,7 +143,7 @@ python-versions = ">=3.6, <3.7" [[package]] name = "dnspython" -version = "2.0.0" +version = "2.1.0" description = "DNS toolkit" category = "main" optional = true @@ -194,7 +178,7 @@ python-versions = "*" [[package]] name = "fastjsonschema" -version = "2.14.5" +version = "2.15.0" description = "Fastest Python implementation of JSON schema" category = "main" optional = false @@ -231,7 +215,7 @@ flake8 = ">=3.0.0" [[package]] name = "flake8-bugbear" -version = "20.1.4" +version = "20.11.1" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -241,6 +225,9 @@ python-versions = ">=3.6" attrs = ">=19.2.0" flake8 = ">=3.0.0" +[package.extras] +dev = ["coverage", "black", "hypothesis", "hypothesmith"] + [[package]] name = "flake8-builtins" version = "1.5.3" @@ -257,11 +244,11 @@ test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] [[package]] name = "flake8-comprehensions" -version = "3.3.0" +version = "3.3.1" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" @@ -356,7 +343,7 @@ smmap = ">=3.0.1,<4" [[package]] name = "gitpython" -version = "3.1.11" +version = "3.1.14" description = "Python Git Library" category = "dev" optional = false @@ -367,26 +354,27 @@ gitdb = ">=4.0.1,<5" [[package]] name = "idna" -version = "2.10" +version = "3.1" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +optional = true +python-versions = ">=3.4" [[package]] name = "importlib-metadata" -version = "2.0.0" +version = "3.7.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "rst.linker"] -testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "isort" @@ -429,7 +417,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "joblib" -version = "1.0.0" +version = "1.0.1" description = "Lightweight pipelining with Python functions" category = "dev" optional = false @@ -437,18 +425,18 @@ python-versions = ">=3.6" [[package]] name = "jsonpickle" -version = "1.4.1" +version = "2.0.0" description = "Python library for serializing any arbitrary object graph into JSON" category = "main" optional = false python-versions = ">=2.7" [package.dependencies] -importlib-metadata = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["coverage (<5)", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "sqlalchemy", "enum34", "jsonlib"] +testing = ["coverage (<5)", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "sklearn", "sqlalchemy", "enum34", "jsonlib"] "testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] [[package]] @@ -481,7 +469,7 @@ languages = ["nltk (>=3.2.5,<3.5)", "nltk (>=3.2.5)"] [[package]] name = "mako" -version = "1.1.3" +version = "1.1.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "dev" optional = false @@ -510,7 +498,7 @@ restructuredText = ["rst2ansi"] [[package]] name = "markdown" -version = "3.3.3" +version = "3.3.4" description = "Python implementation of Markdown." 
category = "dev" optional = false @@ -570,7 +558,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "6.2.7" +version = "6.2.8" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -596,7 +584,7 @@ mkdocs-material = ">=5.0.0" [[package]] name = "more-itertools" -version = "8.6.0" +version = "8.7.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false @@ -626,7 +614,7 @@ twitter = ["twython"] [[package]] name = "packaging" -version = "20.4" +version = "20.9" description = "Core utilities for Python packages" category = "dev" optional = false @@ -634,7 +622,6 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pyparsing = ">=2.0.2" -six = "*" [[package]] name = "pathspec" @@ -680,7 +667,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "py" -version = "1.9.0" +version = "1.10.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false @@ -696,7 +683,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.7.2" +version = "1.7.3" description = "Data validation and settings management using python 3.6 type hinting" category = "main" optional = true @@ -720,7 +707,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.4" +version = "2.8.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -784,14 +771,14 @@ testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.10.1" +version = "2.11.1" description = "Pytest plugin for measuring coverage." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -coverage = ">=4.4" +coverage = ">=5.2.1" pytest = ">=4.6" [package.extras] @@ -824,11 +811,11 @@ six = ">=1.5" [[package]] name = "pyyaml" -version = "5.3.1" +version = "5.4.1" description = "YAML parser and emitter for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "radon" @@ -857,25 +844,19 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.0" +version = "2.15.1" description = "Python HTTP for Humans." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -certifi = ">=2017.4.17" -chardet = ">=3.0.2,<4" -idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.27" +python-versions = "*" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +security = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "s3transfer" -version = "0.3.3" +version = "0.3.4" description = "An Amazon S3 Transfer Manager" category = "main" optional = false @@ -894,7 +875,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "3.0.4" +version = "3.0.5" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false @@ -902,7 +883,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "stevedore" -version = "3.2.2" +version = "3.3.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -914,7 +895,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "testfixtures" -version = "6.15.0" +version = "6.17.1" description = "A collection of helpers and mock objects for unit tests and doc tests." category = "dev" optional = false @@ -943,7 +924,7 @@ python-versions = ">= 3.5" [[package]] name = "tqdm" -version = "4.56.0" +version = "4.58.0" description = "Fast, Extensible Progress Meter" category = "dev" optional = false @@ -955,7 +936,7 @@ telegram = ["requests"] [[package]] name = "typed-ast" -version = "1.4.1" +version = "1.4.2" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -966,12 +947,12 @@ name = "typing-extensions" version = "3.7.4.3" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" -optional = true +optional = false python-versions = "*" [[package]] name = "urllib3" -version = "1.26.2" +version = "1.26.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1029,7 +1010,7 @@ pydantic = ["pydantic", "typing_extensions", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "9c967dd4ada2788799ce868acf31c73849603495047390e4e94e88f8693e40ae" +content-hash = "c8b988d7cf8dda9209e5af324048ce0a56ae3ddbdd33457c9a4791ef89cb47d8" [metadata.files] appdirs = [ @@ -1049,28 +1030,20 @@ aws-xray-sdk = [ {file = "aws_xray_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:076f7c610cd3564bbba3507d43e328fb6ff4a2e841d3590f39b2c3ce99d41e1d"}, ] bandit = [ - {file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"}, - {file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"}, + {file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, + {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, ] black = [ {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] boto3 = [ - {file = "boto3-1.16.23-py2.py3-none-any.whl", hash = "sha256:22a6f11383965d7ece9e391722b2989780960c62997b1aa464ffa1f886e1cfa8"}, - {file = "boto3-1.16.23.tar.gz", hash = "sha256:6e6bd178f930309c2ec79643436aae5cf6f26d51e35aa5e58162675a04785e62"}, + {file = "boto3-1.17.20-py2.py3-none-any.whl", hash = "sha256:c0d51f344b71656c2d395d2168600d91bea252a64fb5d503a955ea96426cde8b"}, + {file = "boto3-1.17.20.tar.gz", hash = "sha256:2219f1ebe88d266afa5516f993983eba8742b957fa4fd6854f3c73aa3030e931"}, ] botocore = [ - {file = "botocore-1.19.23-py2.py3-none-any.whl", hash = "sha256:d73a223bf88d067c3ae0a9a3199abe56e99c94267da77d7fed4c39f572f522c0"}, - {file = "botocore-1.19.23.tar.gz", hash = "sha256:9f9efca44b2ab2d9c133ceeafa377e4b3d260310109284123ebfffc15e28481e"}, -] -certifi = [ - {file = "certifi-2020.11.8-py2.py3-none-any.whl", hash = "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd"}, - {file = "certifi-2020.11.8.tar.gz", hash = "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"}, -] -chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, + {file = "botocore-1.20.20-py2.py3-none-any.whl", hash = "sha256:e9e724b59278ebf5caf032be1e32bde0990d79e8052e3bbbb97b6c1d32feba28"}, + {file = "botocore-1.20.20.tar.gz", hash = "sha256:80c32a81fb1ee8bdfa074a79bfb885bb2006e8a9782f2353c0c9f6392704e13a"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -1081,48 +1054,66 @@ colorama = [ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, - {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, - {file = "coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, - {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, - {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, - {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, - {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, - {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, - {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, - {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, - {file = "coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, - {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, - {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, - {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, - {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, - {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, - {file = 
"coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, - {file = "coverage-5.3-cp39-cp39-win32.whl", hash = "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, - {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, - {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = 
"sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] dnspython = [ - {file = "dnspython-2.0.0-py3-none-any.whl", hash = "sha256:40bb3c24b9d4ec12500f0124288a65df232a3aa749bb0c39734b782873a2544d"}, - {file = "dnspython-2.0.0.zip", hash = "sha256:044af09374469c3a39eeea1a146e8cac27daec951f1f1f157b1962fc7cb9d1b7"}, + {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, + {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, ] email-validator = [ {file = "email-validator-1.1.2.tar.gz", hash = "sha256:1a13bd6050d1db4475f13e444e169b6fe872434922d38968c67cea9568cce2f0"}, @@ -1132,8 +1123,8 @@ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] fastjsonschema = [ - {file = "fastjsonschema-2.14.5-py3-none-any.whl", hash = "sha256:467593c61f5ba8307205a3536313a774b37df91c9a937c5267c11aee5256e77e"}, - {file = "fastjsonschema-2.14.5.tar.gz", hash = "sha256:afbc235655f06356e46caa80190512e4d9222abfaca856041be5a74c665fa094"}, + {file = "fastjsonschema-2.15.0-py3-none-any.whl", hash = "sha256:b3da206676f8b4906debf6a17b650b858c92cb304cbe0c8aa81799bde6a6b858"}, + {file = "fastjsonschema-2.15.0.tar.gz", hash = "sha256:e1ecba260bcffb7de0dda6aee74261da1e6dccde5ee04c1170b2dd97d2b87676"}, ] flake8 = [ {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, @@ -1143,16 +1134,16 @@ flake8-black = [ {file = "flake8-black-0.1.2.tar.gz", hash = "sha256:b79d8d868bd42dc2c1f27469b92a984ecab3579ad285a8708ea5f19bf6c1f3a2"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-20.1.4.tar.gz", hash = "sha256:bd02e4b009fb153fe6072c31c52aeab5b133d508095befb2ffcf3b41c4823162"}, - {file = "flake8_bugbear-20.1.4-py36.py37.py38-none-any.whl", hash = "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63"}, + {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, + {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = 
"sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.3.0.tar.gz", hash = "sha256:355ef47288523cad7977cb9c1bc81b71c82b7091e425cd9fbcd7e5c19a613677"}, - {file = "flake8_comprehensions-3.3.0-py3-none-any.whl", hash = "sha256:c1dd6d8a00e9722619a5c5e0e6c5747f5cf23c089032c86eaf614c14a2e40adb"}, + {file = "flake8-comprehensions-3.3.1.tar.gz", hash = "sha256:e734bf03806bb562886d9bf635d23a65a1a995c251b67d7e007a7b608af9bd22"}, + {file = "flake8_comprehensions-3.3.1-py3-none-any.whl", hash = "sha256:6d80dfafda0d85633f88ea5bc7de949485f71f1e28db7af7719563fe5f62dcb1"}, ] flake8-debugger = [ {file = "flake8-debugger-3.2.1.tar.gz", hash = "sha256:712d7c1ff69ddf3f0130e94cc88c2519e720760bce45e8c330bfdcb61ab4090d"}, @@ -1184,16 +1175,16 @@ gitdb = [ {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.11-py3-none-any.whl", hash = "sha256:6eea89b655917b500437e9668e4a12eabdcf00229a0df1762aabd692ef9b746b"}, - {file = "GitPython-3.1.11.tar.gz", hash = "sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"}, + {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, + {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.1-py3-none-any.whl", hash = "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16"}, + {file = "idna-3.1.tar.gz", hash = "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"}, ] importlib-metadata = [ - {file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"}, - {file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"}, + {file = "importlib_metadata-3.7.0-py3-none-any.whl", hash = "sha256:c6af5dbf1126cd959c4a8d8efd61d4d3c83bddb0459a17e554284a077574b614"}, + {file = "importlib_metadata-3.7.0.tar.gz", hash = "sha256:24499ffde1b80be08284100393955842be4a59c7c16bbf2738aad0e464a8e0aa"}, ] isort = [ {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"}, @@ -1208,12 +1199,12 @@ jmespath = [ {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] joblib = [ - {file = "joblib-1.0.0-py3-none-any.whl", hash = "sha256:75ead23f13484a2a414874779d69ade40d4fa1abe62b222a23cd50d4bc822f6f"}, - {file = "joblib-1.0.0.tar.gz", hash = "sha256:7ad866067ac1fdec27d51c8678ea760601b70e32ff1881d4dc8e1171f2b64b24"}, + {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, + {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, ] jsonpickle = [ - {file = "jsonpickle-1.4.1-py2.py3-none-any.whl", hash = 
"sha256:8919c166bac0574e3d74425c7559434062002d9dfc0ac2afa6dc746ba4a19439"}, - {file = "jsonpickle-1.4.1.tar.gz", hash = "sha256:e8d4b7cd0bd6826001a74377df1079a76ad8bae0f909282de2554164c837c8ba"}, + {file = "jsonpickle-2.0.0-py2.py3-none-any.whl", hash = "sha256:c1010994c1fbda87a48f8a56698605b598cb0fc6bb7e7927559fc1100e69aeac"}, + {file = "jsonpickle-2.0.0.tar.gz", hash = "sha256:0be49cba80ea6f87a168aa8168d717d00c6ca07ba83df3cec32d3b30bfe6fb9a"}, ] livereload = [ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, @@ -1223,16 +1214,15 @@ lunr = [ {file = "lunr-0.5.8.tar.gz", hash = "sha256:c4fb063b98eff775dd638b3df380008ae85e6cb1d1a24d1cd81a10ef6391c26e"}, ] mako = [ - {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, - {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, + {file = "Mako-1.1.4.tar.gz", hash = "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab"}, ] mando = [ {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, ] markdown = [ - {file = "Markdown-3.3.3-py3-none-any.whl", hash = "sha256:c109c15b7dc20a9ac454c9e6025927d44460b85bd039da028d85e2b6d0bcc328"}, - {file = "Markdown-3.3.3.tar.gz", hash = "sha256:5d9f2b5ca24bc4c7a390d22323ca4bad200368612b5aaa7796babf971d2b2f18"}, + {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, + {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -1282,23 +1272,23 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-6.2.7.tar.gz", hash = "sha256:6bef9077527be75a60883c60b07a41e59f33b5400009ea10485337f07b981e24"}, - {file = "mkdocs_material-6.2.7-py2.py3-none-any.whl", hash = "sha256:11241769c4bb6ca2a8f0bad7f1ea3bcfba021f9bbe59e2456b5465e759c2202a"}, + {file = "mkdocs-material-6.2.8.tar.gz", hash = "sha256:ce2f4a71e5db49540d71fd32f9afba7645765f7eca391e560d1d27f947eb344c"}, + {file = "mkdocs_material-6.2.8-py2.py3-none-any.whl", hash = "sha256:c9b63d709d29778aa3dafc7178b6a8c655b00937be2594aab016d1423696c792"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, {file = "mkdocs_material_extensions-1.0.1-py3-none-any.whl", hash = "sha256:d90c807a88348aa6d1805657ec5c0b2d8d609c110e62b9dce4daf7fa981fa338"}, ] more-itertools = [ - {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, - {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, + {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, + {file = "more_itertools-8.7.0-py3-none-any.whl", hash = 
"sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, ] nltk = [ {file = "nltk-3.5.zip", hash = "sha256:845365449cd8c5f9731f7cb9f8bd6fd0767553b9d53af9eb1b3abf7700936b35"}, ] packaging = [ - {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, - {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pathspec = [ {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, @@ -1316,44 +1306,44 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] py = [ - {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, - {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, ] pydantic = [ - {file = "pydantic-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dfaa6ed1d509b5aef4142084206584280bb6e9014f01df931ec6febdad5b200a"}, - {file = "pydantic-1.7.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2182ba2a9290964b278bcc07a8d24207de709125d520efec9ad6fa6f92ee058d"}, - {file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:0fe8b45d31ae53d74a6aa0bf801587bd49970070eac6a6326f9fa2a302703b8a"}, - {file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:01f0291f4951580f320f7ae3f2ecaf0044cdebcc9b45c5f882a7e84453362420"}, - {file = "pydantic-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4ba6b903e1b7bd3eb5df0e78d7364b7e831ed8b4cd781ebc3c4f1077fbcb72a4"}, - {file = "pydantic-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b11fc9530bf0698c8014b2bdb3bbc50243e82a7fa2577c8cfba660bcc819e768"}, - {file = "pydantic-1.7.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a3c274c49930dc047a75ecc865e435f3df89715c775db75ddb0186804d9b04d0"}, - {file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c68b5edf4da53c98bb1ccb556ae8f655575cb2e676aef066c12b08c724a3f1a1"}, - {file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:95d4410c4e429480c736bba0db6cce5aaa311304aea685ebcf9ee47571bfd7c8"}, - {file = "pydantic-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a2fc7bf77ed4a7a961d7684afe177ff59971828141e608f142e4af858e07dddc"}, - {file = "pydantic-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9572c0db13c8658b4a4cb705dcaae6983aeb9842248b36761b3fbc9010b740f"}, - {file = "pydantic-1.7.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f83f679e727742b0c465e7ef992d6da4a7e5268b8edd8fdaf5303276374bef52"}, - {file = "pydantic-1.7.2-cp38-cp38-manylinux2014_i686.whl", 
hash = "sha256:e5fece30e80087d9b7986104e2ac150647ec1658c4789c89893b03b100ca3164"}, - {file = "pydantic-1.7.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce2d452961352ba229fe1e0b925b41c0c37128f08dddb788d0fd73fd87ea0f66"}, - {file = "pydantic-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:fc21a37ff3f545de80b166e1735c4172b41b017948a3fb2d5e2f03c219eac50a"}, - {file = "pydantic-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c9760d1556ec59ff745f88269a8f357e2b7afc75c556b3a87b8dda5bc62da8ba"}, - {file = "pydantic-1.7.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c1673633ad1eea78b1c5c420a47cd48717d2ef214c8230d96ca2591e9e00958"}, - {file = "pydantic-1.7.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:388c0c26c574ff49bad7d0fd6ed82fbccd86a0473fa3900397d3354c533d6ebb"}, - {file = "pydantic-1.7.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ab1d5e4d8de00575957e1c982b951bffaedd3204ddd24694e3baca3332e53a23"}, - {file = "pydantic-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:f045cf7afb3352a03bc6cb993578a34560ac24c5d004fa33c76efec6ada1361a"}, - {file = "pydantic-1.7.2-py3-none-any.whl", hash = "sha256:6665f7ab7fbbf4d3c1040925ff4d42d7549a8c15fe041164adfe4fc2134d4cce"}, - {file = "pydantic-1.7.2.tar.gz", hash = "sha256:c8200aecbd1fb914e1bd061d71a4d1d79ecb553165296af0c14989b89e90d09b"}, + {file = "pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"}, + {file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"}, + {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"}, + {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"}, + {file = "pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"}, + {file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"}, + {file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"}, + {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"}, + {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"}, + {file = "pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"}, + {file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"}, + {file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"}, + {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"}, + {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"}, + {file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"}, + {file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"}, + {file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"}, + {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"}, + {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"}, + {file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"}, + {file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"}, + {file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"}, ] pyflakes = [ {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, ] pygments = [ - {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, - {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, + {file = "Pygments-2.8.0-py3-none-any.whl", hash = "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88"}, + {file = "Pygments-2.8.0.tar.gz", hash = "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0"}, ] pymdown-extensions = [ {file = "pymdown-extensions-8.1.1.tar.gz", hash = "sha256:632371fa3bf1b21a0e3f4063010da59b41db049f261f4c0b0872069a9b6d1735"}, @@ -1371,8 +1361,8 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"}, ] pytest-cov = [ - {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, - {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, + {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pytest-mock = [ {file = "pytest-mock-2.0.0.tar.gz", hash = "sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f"}, @@ -1383,19 +1373,27 @@ python-dateutil = [ {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] pyyaml = [ - {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, - {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, - {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, - {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, - {file = "PyYAML-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a"}, - {file = "PyYAML-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e"}, - {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = 
"sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] radon = [ {file = "radon-4.3.2-py2.py3-none-any.whl", hash = "sha256:b991de491eb2edbc2aac8f5f7ebf02b799852f076fa5a73fedf79d144d85e37e"}, @@ -1445,28 +1443,28 @@ regex = [ {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, ] requests = [ - {file = "requests-2.25.0-py2.py3-none-any.whl", hash = "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"}, - {file = "requests-2.25.0.tar.gz", hash = "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"}, + {file = "requests-2.15.1-py2.py3-none-any.whl", hash = "sha256:ff753b2196cd18b1bbeddc9dcd5c864056599f7a7d9a4fb5677e723efa2b7fb9"}, + {file = "requests-2.15.1.tar.gz", hash = "sha256:e5659b9315a0610505e050bb7190bf6fa2ccee1ac295f2b760ef9d8a03ebbb2e"}, ] s3transfer = [ - {file = "s3transfer-0.3.3-py2.py3-none-any.whl", hash = "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13"}, - {file = "s3transfer-0.3.3.tar.gz", hash = "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"}, + {file = "s3transfer-0.3.4-py2.py3-none-any.whl", hash = "sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed"}, + {file = "s3transfer-0.3.4.tar.gz", hash = "sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"}, - {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, + {file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, + {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, ] stevedore = [ - {file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"}, - {file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"}, + {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, + {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, ] testfixtures = [ - {file = "testfixtures-6.15.0-py2.py3-none-any.whl", hash = "sha256:e17f4f526fc90b0ac9bc7f8ca62b7dec17d9faf3d721f56bda4f0fd94d02f85a"}, - {file = "testfixtures-6.15.0.tar.gz", hash = "sha256:409f77cfbdad822d12a8ce5c4aa8fb4d0bb38073f4a5444fede3702716a2cec2"}, + {file = "testfixtures-6.17.1-py2.py3-none-any.whl", hash = "sha256:9ed31e83f59619e2fa17df053b241e16e0608f4580f7b5a9333a0c9bdcc99137"}, + {file = "testfixtures-6.17.1.tar.gz", hash = "sha256:5ec3a0dd6f71cc4c304fbc024a10cc293d3e0b852c868014b9f233203e149bda"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = 
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, @@ -1516,40 +1514,40 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] tqdm = [ - {file = "tqdm-4.56.0-py2.py3-none-any.whl", hash = "sha256:4621f6823bab46a9cc33d48105753ccbea671b68bab2c50a9f0be23d4065cb5a"}, - {file = "tqdm-4.56.0.tar.gz", hash = "sha256:fe3d08dd00a526850568d542ff9de9bbc2a09a791da3c334f3213d8d0bbbca65"}, + {file = "tqdm-4.58.0-py2.py3-none-any.whl", hash = "sha256:2c44efa73b8914dba7807aefd09653ac63c22b5b4ea34f7a80973f418f1a3089"}, + {file = "tqdm-4.58.0.tar.gz", hash = "sha256:c23ac707e8e8aabb825e4d91f8e17247f9cc14b0d64dd9e97be0781e9e525bba"}, ] typed-ast = [ - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, - {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, - {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, - {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d"}, - {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, - {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c"}, - {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072"}, - {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91"}, - {file = "typed_ast-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d"}, - {file = "typed_ast-1.4.1-cp39-cp39-win32.whl", hash = "sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395"}, - {file = "typed_ast-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c"}, - {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, @@ -1557,8 +1555,8 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, - {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, diff --git a/pyproject.toml b/pyproject.toml index eb45c8418c8..5ee64ed567a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.10.5" +version = "1.11.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] include = 
["aws_lambda_powertools/py.typed"] diff --git a/pytest.ini b/pytest.ini index 45345cbd365..4f01361ce5e 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,5 @@ [pytest] -addopts = -ra --cov --cov-config=.coveragerc +addopts = -ra -vvv testpaths = ./tests +markers = + perf: marks perf tests to be deselected (deselect with '-m "not perf"') diff --git a/tests/events/apiGatewayProxyV2Event.json b/tests/events/apiGatewayProxyV2Event.json index 9c310e6d52f..4d0cfdf5703 100644 --- a/tests/events/apiGatewayProxyV2Event.json +++ b/tests/events/apiGatewayProxyV2Event.json @@ -45,7 +45,7 @@ "time": "12/Mar/2020:19:03:58 +0000", "timeEpoch": 1583348638390 }, - "body": "Hello from Lambda", + "body": "{\"message\": \"hello world\", \"username\": \"tom\"}", "pathParameters": { "parameter1": "value1" }, diff --git a/tests/events/connectContactFlowEventAll.json b/tests/events/connectContactFlowEventAll.json new file mode 100644 index 00000000000..5850649b6eb --- /dev/null +++ b/tests/events/connectContactFlowEventAll.json @@ -0,0 +1,41 @@ +{ + "Name": "ContactFlowEvent", + "Details": { + "ContactData": { + "Attributes": { + "Language": "en-US" + }, + "Channel": "VOICE", + "ContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "CustomerEndpoint": { + "Address": "+11234567890", + "Type": "TELEPHONE_NUMBER" + }, + "InitialContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "InitiationMethod": "API", + "InstanceARN": "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa", + "MediaStreams": { + "Customer": { + "Audio": { + "StartFragmentNumber": "91343852333181432392682062622220590765191907586", + "StartTimestamp": "1565781909613", + "StreamARN": "arn:aws:kinesisvideo:eu-central-1:123456789012:stream/connect-contact-a3d73b84-ce0e-479a-a9dc-5637c9d30ac9/1565272947806" + } + } + }, + "PreviousContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "Queue": { + "ARN": "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa/queue/5cba7cbf-1ecb-4b6d-b8bd-fe91079b3fc8", + "Name": "QueueOne" + }, + "SystemEndpoint": { + "Address": "+11234567890", + "Type": "TELEPHONE_NUMBER" + } + }, + "Parameters": { + "ParameterOne": "One", + "ParameterTwo": "Two" + } + } +} \ No newline at end of file diff --git a/tests/events/connectContactFlowEventMin.json b/tests/events/connectContactFlowEventMin.json new file mode 100644 index 00000000000..9cc22d59c3f --- /dev/null +++ b/tests/events/connectContactFlowEventMin.json @@ -0,0 +1,27 @@ +{ + "Name": "ContactFlowEvent", + "Details": { + "ContactData": { + "Attributes": {}, + "Channel": "VOICE", + "ContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "CustomerEndpoint": null, + "InitialContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "InitiationMethod": "API", + "InstanceARN": "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa", + "MediaStreams": { + "Customer": { + "Audio": { + "StartFragmentNumber": null, + "StartTimestamp": null, + "StreamARN": null + } + } + }, + "PreviousContactId": "5ca32fbd-8f92-46af-92a5-6b0f970f0efe", + "Queue": null, + "SystemEndpoint": null + }, + "Parameters": {} + } +} \ No newline at end of file diff --git a/tests/functional/idempotency/__init__.py b/tests/functional/idempotency/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py new file mode 100644 index 00000000000..532d551ef40 --- /dev/null +++ 
b/tests/functional/idempotency/conftest.py @@ -0,0 +1,197 @@ +import datetime +import hashlib +import json +import os +from decimal import Decimal +from unittest import mock + +import jmespath +import pytest +from botocore import stub +from botocore.config import Config +from jmespath import functions + +from aws_lambda_powertools.shared.json_encoder import Encoder +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer +from aws_lambda_powertools.utilities.idempotency.idempotency import IdempotencyConfig +from aws_lambda_powertools.utilities.validation import envelopes +from aws_lambda_powertools.utilities.validation.base import unwrap_event_from_envelope + +TABLE_NAME = "TEST_TABLE" + + +@pytest.fixture(scope="module") +def config() -> Config: + return Config(region_name="us-east-1") + + +@pytest.fixture(scope="module") +def lambda_apigw_event(): + full_file_name = os.path.dirname(os.path.realpath(__file__)) + "/../../events/" + "apiGatewayProxyV2Event.json" + with open(full_file_name) as fp: + event = json.load(fp) + + return event + + +@pytest.fixture +def timestamp_future(): + return str(int((datetime.datetime.now() + datetime.timedelta(seconds=3600)).timestamp())) + + +@pytest.fixture +def timestamp_expired(): + now = datetime.datetime.now() + period = datetime.timedelta(seconds=6400) + return str(int((now - period).timestamp())) + + +@pytest.fixture(scope="module") +def lambda_response(): + return {"message": "test", "statusCode": 200, "decimal_val": Decimal("2.5"), "decimal_NaN": Decimal("NaN")} + + +@pytest.fixture(scope="module") +def serialized_lambda_response(lambda_response): + return json.dumps(lambda_response, cls=Encoder) + + +@pytest.fixture(scope="module") +def deserialized_lambda_response(lambda_response): + return json.loads(json.dumps(lambda_response, cls=Encoder)) + + +@pytest.fixture +def default_jmespath(): + return "[body, queryStringParameters]" + + +@pytest.fixture +def expected_params_update_item(serialized_lambda_response, hashed_idempotency_key): + return { + "ExpressionAttributeNames": {"#expiry": "expiration", "#response_data": "data", "#status": "status"}, + "ExpressionAttributeValues": { + ":expiry": stub.ANY, + ":response_data": serialized_lambda_response, + ":status": "COMPLETED", + }, + "Key": {"id": hashed_idempotency_key}, + "TableName": "TEST_TABLE", + "UpdateExpression": "SET #response_data = :response_data, " "#expiry = :expiry, #status = :status", + } + + +@pytest.fixture +def expected_params_update_item_with_validation( + serialized_lambda_response, hashed_idempotency_key, hashed_validation_key +): + return { + "ExpressionAttributeNames": { + "#expiry": "expiration", + "#response_data": "data", + "#status": "status", + "#validation_key": "validation", + }, + "ExpressionAttributeValues": { + ":expiry": stub.ANY, + ":response_data": serialized_lambda_response, + ":status": "COMPLETED", + ":validation_key": hashed_validation_key, + }, + "Key": {"id": hashed_idempotency_key}, + "TableName": "TEST_TABLE", + "UpdateExpression": "SET #response_data = :response_data, " + "#expiry = :expiry, #status = :status, " + "#validation_key = :validation_key", + } + + +@pytest.fixture +def expected_params_put_item(hashed_idempotency_key): + return { + "ConditionExpression": "attribute_not_exists(id) OR expiration < :now", + "ExpressionAttributeValues": {":now": stub.ANY}, + "Item": {"expiration": stub.ANY, "id": hashed_idempotency_key, "status": "INPROGRESS"}, + "TableName": "TEST_TABLE", + } + + +@pytest.fixture +def 
expected_params_put_item_with_validation(hashed_idempotency_key, hashed_validation_key): + return { + "ConditionExpression": "attribute_not_exists(id) OR expiration < :now", + "ExpressionAttributeValues": {":now": stub.ANY}, + "Item": { + "expiration": stub.ANY, + "id": hashed_idempotency_key, + "status": "INPROGRESS", + "validation": hashed_validation_key, + }, + "TableName": "TEST_TABLE", + } + + +@pytest.fixture +def hashed_idempotency_key(lambda_apigw_event, default_jmespath): + compiled_jmespath = jmespath.compile(default_jmespath) + data = compiled_jmespath.search(lambda_apigw_event) + return hashlib.md5(json.dumps(data).encode()).hexdigest() + + +@pytest.fixture +def hashed_idempotency_key_with_envelope(lambda_apigw_event): + event = unwrap_event_from_envelope( + data=lambda_apigw_event, envelope=envelopes.API_GATEWAY_HTTP, jmespath_options={} + ) + return hashlib.md5(json.dumps(event).encode()).hexdigest() + + +@pytest.fixture +def hashed_validation_key(lambda_apigw_event): + return hashlib.md5(json.dumps(lambda_apigw_event["requestContext"]).encode()).hexdigest() + + +@pytest.fixture +def persistence_store(config): + return DynamoDBPersistenceLayer(table_name=TABLE_NAME, boto_config=config) + + +@pytest.fixture +def idempotency_config(config, request, default_jmespath): + return IdempotencyConfig( + event_key_jmespath=request.param.get("event_key_jmespath") or default_jmespath, + use_local_cache=request.param["use_local_cache"], + ) + + +@pytest.fixture +def config_without_jmespath(config, request): + return IdempotencyConfig(use_local_cache=request.param["use_local_cache"]) + + +@pytest.fixture +def config_with_validation(config, request, default_jmespath): + return IdempotencyConfig( + event_key_jmespath=default_jmespath, + use_local_cache=request.param, + payload_validation_jmespath="requestContext", + ) + + +@pytest.fixture +def config_with_jmespath_options(config, request): + class CustomFunctions(functions.Functions): + @functions.signature({"types": ["string"]}) + def _func_echo_decoder(self, value): + return value + + return IdempotencyConfig( + use_local_cache=False, + event_key_jmespath=request.param, + jmespath_options={"custom_functions": CustomFunctions()}, + ) + + +@pytest.fixture +def mock_function(): + return mock.MagicMock() diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py new file mode 100644 index 00000000000..6def9b4868a --- /dev/null +++ b/tests/functional/idempotency/test_idempotency.py @@ -0,0 +1,777 @@ +import copy +import json +import sys +from hashlib import md5 + +import jmespath +import pytest +from botocore import stub + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyAlreadyInProgressError, + IdempotencyInconsistentStateError, + IdempotencyInvalidStatusError, + IdempotencyKeyError, + IdempotencyPersistenceLayerError, + IdempotencyValidationError, +) +from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent +from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord +from aws_lambda_powertools.utilities.validation import envelopes, validator + +TABLE_NAME = "TEST_TABLE" + + +# Using parametrize to run test twice, with two separate instances of persistence store. One instance with caching +# enabled, and one without. 
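+# All tests also share one stubbing pattern: botocore's Stubber wraps the boto3
+# client underneath the persistence layer, so no real DynamoDB call is made.
+# A minimal sketch of the flow (illustrative only, reusing names from this file):
+#
+#     stubber = stub.Stubber(persistence_store.table.meta.client)
+#     stubber.add_client_error("put_item", "ConditionalCheckFailedException")
+#     stubber.add_response("get_item", ddb_response, expected_params)
+#     stubber.activate()                     # intercept AWS calls from here on
+#     ...                                    # invoke the @idempotent handler
+#     stubber.assert_no_pending_responses()  # every queued stub was consumed
+#     stubber.deactivate()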
+@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_already_completed( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + hashed_idempotency_key, + serialized_lambda_response, + deserialized_lambda_response, +): + """ + Test idempotent decorator where event with matching event key has already been succesfully processed + """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + ddb_response = { + "Item": { + "id": {"S": hashed_idempotency_key}, + "expiration": {"N": timestamp_future}, + "data": {"S": serialized_lambda_response}, + "status": {"S": "COMPLETED"}, + } + } + + expected_params = { + "TableName": TABLE_NAME, + "Key": {"id": hashed_idempotency_key}, + "ConsistentRead": True, + } + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response, expected_params) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + raise Exception + + lambda_resp = lambda_handler(lambda_apigw_event, {}) + assert lambda_resp == deserialized_lambda_response + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_in_progress( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + lambda_response, + timestamp_future, + hashed_idempotency_key, +): + """ + Test idempotent decorator where lambda_handler is already processing an event with matching event key + """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + + expected_params = { + "TableName": TABLE_NAME, + "Key": {"id": hashed_idempotency_key}, + "ConsistentRead": True, + } + ddb_response = { + "Item": { + "id": {"S": hashed_idempotency_key}, + "expiration": {"N": timestamp_future}, + "status": {"S": "INPROGRESS"}, + } + } + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response, expected_params) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + with pytest.raises(IdempotencyAlreadyInProgressError) as ex: + lambda_handler(lambda_apigw_event, {}) + assert ( + ex.value.args[0] == "Execution already in progress with idempotency key: " + "body=a3edd699125517bb49d562501179ecbd" + ) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.skipif(sys.version_info < (3, 8), reason="issue with pytest mock lib for < 3.8") +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_in_progress_with_cache( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + lambda_response, + timestamp_future, + hashed_idempotency_key, + mocker, +): + """ + Test idempotent decorator where lambda_handler is already processing an event with matching event key, cache + enabled. 
+ """ + save_to_cache_spy = mocker.spy(persistence_store, "_save_to_cache") + retrieve_from_cache_spy = mocker.spy(persistence_store, "_retrieve_from_cache") + stubber = stub.Stubber(persistence_store.table.meta.client) + + expected_params = { + "TableName": TABLE_NAME, + "Key": {"id": hashed_idempotency_key}, + "ConsistentRead": True, + } + ddb_response = { + "Item": { + "id": {"S": hashed_idempotency_key}, + "expiration": {"N": timestamp_future}, + "status": {"S": "INPROGRESS"}, + } + } + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response, expected_params) + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", copy.deepcopy(ddb_response), copy.deepcopy(expected_params)) + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", copy.deepcopy(ddb_response), copy.deepcopy(expected_params)) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + loops = 3 + for _ in range(loops): + with pytest.raises(IdempotencyAlreadyInProgressError) as ex: + lambda_handler(lambda_apigw_event, {}) + assert ( + ex.value.args[0] == "Execution already in progress with idempotency key: " + "body=a3edd699125517bb49d562501179ecbd" + ) + + assert retrieve_from_cache_spy.call_count == 2 * loops + retrieve_from_cache_spy.assert_called_with(idempotency_key=hashed_idempotency_key) + + save_to_cache_spy.assert_called() + assert persistence_store._cache.get(hashed_idempotency_key) is None + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_first_execution( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + expected_params_update_item, + expected_params_put_item, + lambda_response, + serialized_lambda_response, + deserialized_lambda_response, + hashed_idempotency_key, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key + """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_response("update_item", ddb_response, expected_params_update_item) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.skipif(sys.version_info < (3, 8), reason="issue with pytest mock lib for < 3.8") +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_first_execution_cached( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event: DynamoDBPersistenceLayer, + expected_params_update_item, + expected_params_put_item, + lambda_response, + hashed_idempotency_key, + mocker, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key. Ensure + result is cached locally on the persistence store instance. 
+ """ + save_to_cache_spy = mocker.spy(persistence_store, "_save_to_cache") + retrieve_from_cache_spy = mocker.spy(persistence_store, "_retrieve_from_cache") + stubber = stub.Stubber(persistence_store.table.meta.client) + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_response("update_item", ddb_response, expected_params_update_item) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + lambda_handler(lambda_apigw_event, {}) + + retrieve_from_cache_spy.assert_called_once() + save_to_cache_spy.assert_called_once() + assert save_to_cache_spy.call_args[0][0].status == "COMPLETED" + assert persistence_store._cache.get(hashed_idempotency_key).status == "COMPLETED" + + # This lambda call should not call AWS API + lambda_handler(lambda_apigw_event, {}) + assert retrieve_from_cache_spy.call_count == 3 + retrieve_from_cache_spy.assert_called_with(idempotency_key=hashed_idempotency_key) + + # This assertion fails if an AWS API operation was called more than once + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_expired( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_expired, + lambda_response, + expected_params_update_item, + expected_params_put_item, + hashed_idempotency_key, +): + """ + Test idempotent decorator when lambda is called with an event it succesfully handled already, but outside of the + expiry window + """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_response("update_item", ddb_response, expected_params_update_item) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_exception( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + lambda_response, + hashed_idempotency_key, + expected_params_put_item, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but + lambda_handler raises an exception which is retryable. 
+ """ + + # Create a new provider + + # Stub the boto3 client + stubber = stub.Stubber(persistence_store.table.meta.client) + + ddb_response = {} + expected_params_delete_item = {"TableName": TABLE_NAME, "Key": {"id": hashed_idempotency_key}} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_response("delete_item", ddb_response, expected_params_delete_item) + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + raise Exception("Something went wrong!") + + with pytest.raises(Exception): + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize( + "config_with_validation", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True +) +def test_idempotent_lambda_already_completed_with_validation_bad_payload( + config_with_validation: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + lambda_response, + hashed_idempotency_key, + hashed_validation_key, +): + """ + Test idempotent decorator where event with matching event key has already been successfully processed + """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + ddb_response = { + "Item": { + "id": {"S": hashed_idempotency_key}, + "expiration": {"N": timestamp_future}, + "data": {"S": '{"message": "test", "statusCode": 200}'}, + "status": {"S": "COMPLETED"}, + "validation": {"S": hashed_validation_key}, + } + } + + expected_params = {"TableName": TABLE_NAME, "Key": {"id": hashed_idempotency_key}, "ConsistentRead": True} + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response, expected_params) + stubber.activate() + + @idempotent(config=config_with_validation, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + with pytest.raises(IdempotencyValidationError): + lambda_apigw_event["requestContext"]["accountId"] += "1" # Alter the request payload + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_lambda_expired_during_request( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_expired, + lambda_response, + expected_params_update_item, + hashed_idempotency_key, +): + """ + Test idempotent decorator when lambda is called with an event it succesfully handled already. Persistence store + returns inconsistent/rapidly changing result between put_item and get_item calls. 
+ """ + + stubber = stub.Stubber(persistence_store.table.meta.client) + + ddb_response_get_item = { + "Item": { + "id": {"S": hashed_idempotency_key}, + "expiration": {"N": timestamp_expired}, + "data": {"S": '{"message": "test", "statusCode": 200}'}, + "status": {"S": "INPROGRESS"}, + } + } + ddb_response_get_item_missing = {} + expected_params_get_item = { + "TableName": TABLE_NAME, + "Key": {"id": hashed_idempotency_key}, + "ConsistentRead": True, + } + + # Simulate record repeatedly changing state between put_item and get_item + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response_get_item, expected_params_get_item) + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", ddb_response_get_item_missing) + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_response("get_item", copy.deepcopy(ddb_response_get_item), copy.deepcopy(expected_params_get_item)) + + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + # max retries exceeded before get_item and put_item agree on item state, so exception gets raised + with pytest.raises(IdempotencyInconsistentStateError): + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_persistence_exception_deleting( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + lambda_response, + hashed_idempotency_key, + expected_params_put_item, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but + lambda_handler raises an exception which is retryable. + """ + stubber = stub.Stubber(persistence_store.table.meta.client) + + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_client_error("delete_item", "UnrecoverableError") + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + raise Exception("Something went wrong!") + + with pytest.raises(IdempotencyPersistenceLayerError) as exc: + lambda_handler(lambda_apigw_event, {}) + + assert exc.value.args[0] == "Failed to delete record from idempotency store" + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_persistence_exception_updating( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + lambda_response, + hashed_idempotency_key, + expected_params_put_item, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but + lambda_handler raises an exception which is retryable. 
+ """ + stubber = stub.Stubber(persistence_store.table.meta.client) + + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item) + stubber.add_client_error("update_item", "UnrecoverableError") + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return {"message": "success!"} + + with pytest.raises(IdempotencyPersistenceLayerError) as exc: + lambda_handler(lambda_apigw_event, {}) + + assert exc.value.args[0] == "Failed to update record state to success in idempotency store" + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True) +def test_idempotent_persistence_exception_getting( + idempotency_config: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + lambda_response, + hashed_idempotency_key, + expected_params_put_item, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key, but + lambda_handler raises an exception which is retryable. + """ + stubber = stub.Stubber(persistence_store.table.meta.client) + + stubber.add_client_error("put_item", "ConditionalCheckFailedException") + stubber.add_client_error("get_item", "UnexpectedException") + stubber.activate() + + @idempotent(config=idempotency_config, persistence_store=persistence_store) + def lambda_handler(event, context): + return {"message": "success!"} + + with pytest.raises(IdempotencyPersistenceLayerError) as exc: + lambda_handler(lambda_apigw_event, {}) + + assert exc.value.args[0] == "Failed to get record from idempotency store" + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize( + "config_with_validation", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True +) +def test_idempotent_lambda_first_execution_with_validation( + config_with_validation: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + expected_params_update_item_with_validation, + expected_params_put_item_with_validation, + lambda_response, + hashed_idempotency_key, + hashed_validation_key, +): + """ + Test idempotent decorator when lambda is executed with an event with a previously unknown event key + """ + stubber = stub.Stubber(persistence_store.table.meta.client) + ddb_response = {} + + stubber.add_response("put_item", ddb_response, expected_params_put_item_with_validation) + stubber.add_response("update_item", ddb_response, expected_params_update_item_with_validation) + stubber.activate() + + @idempotent(config=config_with_validation, persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + lambda_handler(lambda_apigw_event, {}) + + stubber.assert_no_pending_responses() + stubber.deactivate() + + +@pytest.mark.parametrize( + "config_without_jmespath", [{"use_local_cache": False}, {"use_local_cache": True}], indirect=True +) +def test_idempotent_lambda_with_validator_util( + config_without_jmespath: IdempotencyConfig, + persistence_store: DynamoDBPersistenceLayer, + lambda_apigw_event, + timestamp_future, + serialized_lambda_response, + deserialized_lambda_response, + hashed_idempotency_key_with_envelope, + mock_function, +): + """ + Test idempotent decorator where event with matching event key has already been succesfully processed, using the + validator 
+    """
+
+    stubber = stub.Stubber(persistence_store.table.meta.client)
+    ddb_response = {
+        "Item": {
+            "id": {"S": hashed_idempotency_key_with_envelope},
+            "expiration": {"N": timestamp_future},
+            "data": {"S": serialized_lambda_response},
+            "status": {"S": "COMPLETED"},
+        }
+    }
+
+    expected_params = {
+        "TableName": TABLE_NAME,
+        "Key": {"id": hashed_idempotency_key_with_envelope},
+        "ConsistentRead": True,
+    }
+    stubber.add_client_error("put_item", "ConditionalCheckFailedException")
+    stubber.add_response("get_item", ddb_response, expected_params)
+    stubber.activate()
+
+    @validator(envelope=envelopes.API_GATEWAY_HTTP)
+    @idempotent(config=config_without_jmespath, persistence_store=persistence_store)
+    def lambda_handler(event, context):
+        mock_function()
+        return "shouldn't get here!"
+
+    mock_function.assert_not_called()
+    lambda_resp = lambda_handler(lambda_apigw_event, {})
+    assert lambda_resp == deserialized_lambda_response
+
+    stubber.assert_no_pending_responses()
+    stubber.deactivate()
+
+
+def test_data_record_invalid_status_value():
+    data_record = DataRecord("key", status="UNSUPPORTED_STATUS")
+    with pytest.raises(IdempotencyInvalidStatusError) as e:
+        _ = data_record.status
+
+    assert e.value.args[0] == "UNSUPPORTED_STATUS"
+
+
+@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True)
+def test_in_progress_never_saved_to_cache(
+    idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer
+):
+    # GIVEN a data record with status "INPROGRESS"
+    # and persistence_store has use_local_cache = True
+    persistence_store.configure(idempotency_config)
+    data_record = DataRecord("key", status="INPROGRESS")
+
+    # WHEN saving to local cache
+    persistence_store._save_to_cache(data_record)
+
+    # THEN don't save to local cache
+    assert persistence_store._cache.get("key") is None
+
+
+@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": False}], indirect=True)
+def test_user_local_disabled(idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer):
+    # GIVEN a persistence_store with use_local_cache = False
+    persistence_store.configure(idempotency_config)
+
+    # WHEN calling any local cache methods
+    data_record = DataRecord("key", status="COMPLETED")
+    try:
+        persistence_store._save_to_cache(data_record)
+        cache_value = persistence_store._retrieve_from_cache("key")
+        assert cache_value is None
+        persistence_store._delete_from_cache("key")
+    except AttributeError as e:
+        pytest.fail(f"AttributeError should not be raised: {e}")
+
+    # THEN the cache methods should be no-ops that never raise AttributeError
+    # AND the store should not have a _cache attribute
+    assert not hasattr(persistence_store, "_cache")
+
+
+@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True)
+def test_delete_from_cache_when_empty(
+    idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer
+):
+    # GIVEN use_local_cache is True AND the local cache is empty
+    persistence_store.configure(idempotency_config)
+
+    try:
+        # WHEN we _delete_from_cache
+        persistence_store._delete_from_cache("key_does_not_exist")
+    except KeyError:
+        # THEN we should not get a KeyError
+        pytest.fail("KeyError should not happen")
+
+
+def test_is_missing_idempotency_key():
+    # GIVEN None THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key(None)
+    # GIVEN a list of Nones THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key([None, None])
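+    # The remaining assertions pin down the assumed contract: data counts as
+    # "missing" when it is None, all-None, or empty. A sketch consistent with
+    # these tests (the shipped implementation lives in persistence/base.py and
+    # may differ):
+    #
+    #     @staticmethod
+    #     def is_missing_idempotency_key(data) -> bool:
+    #         if isinstance(data, (tuple, list, dict)):
+    #             return all(x is None for x in data)
+    #         return not data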
+    # GIVEN a list with at least one non-None value THEN is_missing_idempotency_key is False
+    assert BasePersistenceLayer.is_missing_idempotency_key([None, "Value"]) is False
+    # GIVEN a str THEN is_missing_idempotency_key is False
+    assert BasePersistenceLayer.is_missing_idempotency_key("Value") is False
+    # GIVEN an empty tuple THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key(())
+    # GIVEN an empty list THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key([])
+    # GIVEN an empty dictionary THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key({})
+    # GIVEN an empty str THEN is_missing_idempotency_key is True
+    assert BasePersistenceLayer.is_missing_idempotency_key("")
+
+
+@pytest.mark.parametrize(
+    "idempotency_config", [{"use_local_cache": False, "event_key_jmespath": "body"}], indirect=True
+)
+def test_default_no_raise_on_missing_idempotency_key(
+    idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer
+):
+    # GIVEN a persistence_store with use_local_cache = False and event_key_jmespath = "body"
+    persistence_store.configure(idempotency_config)
+    assert persistence_store.use_local_cache is False
+    assert "body" in persistence_store.event_key_jmespath
+
+    # WHEN getting the hashed idempotency key for an event with no `body` key
+    hashed_key = persistence_store._get_hashed_idempotency_key({})
+
+    # THEN return the hash of None
+    assert md5(json.dumps(None).encode()).hexdigest() == hashed_key
+
+
+@pytest.mark.parametrize(
+    "idempotency_config", [{"use_local_cache": False, "event_key_jmespath": "[body, x]"}], indirect=True
+)
+def test_raise_on_no_idempotency_key(
+    idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer
+):
+    # GIVEN a persistence_store with raise_on_no_idempotency_key and no idempotency key in the request
+    persistence_store.configure(idempotency_config)
+    persistence_store.raise_on_no_idempotency_key = True
+    assert persistence_store.use_local_cache is False
+    assert "body" in persistence_store.event_key_jmespath
+
+    # WHEN getting the hashed idempotency key for an event with no `body` key
+    with pytest.raises(IdempotencyKeyError) as excinfo:
+        persistence_store._get_hashed_idempotency_key({})
+
+    # THEN raise IdempotencyKeyError error
+    assert "No data found to create a hashed idempotency_key" in str(excinfo.value)
+
+
+@pytest.mark.parametrize(
+    "idempotency_config",
+    [
+        {
+            "use_local_cache": False,
+            "event_key_jmespath": "[requestContext.authorizer.claims.sub, powertools_json(body).id]",
+        }
+    ],
+    indirect=True,
+)
+def test_jmespath_with_powertools_json(
+    idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer
+):
+    # GIVEN an event_key_jmespath with powertools_json custom function
+    persistence_store.configure(idempotency_config)
+    sub_attr_value = "cognito_user"
+    key_attr_value = "some_key"
+    expected_value = [sub_attr_value, key_attr_value]
+    api_gateway_proxy_event = {
+        "requestContext": {"authorizer": {"claims": {"sub": sub_attr_value}}},
+        "body": json.dumps({"id": key_attr_value}),
+    }
+
+    # WHEN calling _get_hashed_idempotency_key
+    result = persistence_store._get_hashed_idempotency_key(api_gateway_proxy_event)
+
+    # THEN the hashed idempotency key should match the hash generated from the extracted values
+    assert result == persistence_store._generate_hash(expected_value)
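+
+
+# powertools_json is a built-in custom JMESPath function registered by the utility
+# so key expressions can reach into JSON-encoded fields such as API Gateway bodies.
+# A sketch of how such a function is wired up, mirroring the CustomFunctions
+# pattern in conftest.py (the shipped implementation may differ):
+#
+#     class PowertoolsFunctions(functions.Functions):
+#         @functions.signature({"types": ["string"]})
+#         def _func_powertools_json(self, value):
+#             return json.loads(value)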
["powertools_json(data).payload"], indirect=True) +def test_custom_jmespath_function_overrides_builtin_functions( + config_with_jmespath_options: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer +): + # GIVEN an persistence store with a custom jmespath_options + # AND use a builtin powertools custom function + persistence_store.configure(config_with_jmespath_options) + + with pytest.raises(jmespath.exceptions.UnknownFunctionError, match="Unknown function: powertools_json()"): + # WHEN calling _get_hashed_idempotency_key + # THEN raise unknown function + persistence_store._get_hashed_idempotency_key({}) diff --git a/tests/functional/test_lambda_trigger_events.py b/tests/functional/test_lambda_trigger_events.py index d6d225bf530..a6fb82970fc 100644 --- a/tests/functional/test_lambda_trigger_events.py +++ b/tests/functional/test_lambda_trigger_events.py @@ -29,6 +29,12 @@ VerifyAuthChallengeResponseTriggerEvent, ) from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper +from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( + ConnectContactFlowChannel, + ConnectContactFlowEndpointType, + ConnectContactFlowEvent, + ConnectContactFlowInitiationMethod, +) from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( AttributeValue, DynamoDBRecordEventName, @@ -54,6 +60,9 @@ def message(self) -> str: assert DataClassSample(data1) == DataClassSample(data1) assert DataClassSample(data1) != DataClassSample(data2) + # Comparing against a dict should not be equals + assert DataClassSample(data1) != data1 + assert data1 != DataClassSample(data1) assert DataClassSample(data1) is not data1 assert data1 is not DataClassSample(data1) @@ -315,6 +324,67 @@ def test_verify_auth_challenge_response_trigger_event(): assert event.response.answer_correct is True +def test_connect_contact_flow_event_min(): + event = ConnectContactFlowEvent(load_event("connectContactFlowEventMin.json")) + + assert event.contact_data.attributes == {} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.customer_endpoint is None + assert event.contact_data.initial_contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API + assert ( + event.contact_data.instance_arn + == "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa" + ) + assert event.contact_data.media_streams.customer.audio.start_fragment_number is None + assert event.contact_data.media_streams.customer.audio.start_timestamp is None + assert event.contact_data.media_streams.customer.audio.stream_arn is None + assert event.contact_data.previous_contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.queue is None + assert event.contact_data.system_endpoint is None + assert event.parameters == {} + + +def test_connect_contact_flow_event_all(): + event = ConnectContactFlowEvent(load_event("connectContactFlowEventAll.json")) + + assert event.contact_data.attributes == {"Language": "en-US"} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.customer_endpoint is not None + assert event.contact_data.customer_endpoint.address == "+11234567890" + assert 
event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.contact_data.initial_contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API + assert ( + event.contact_data.instance_arn + == "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa" + ) + assert ( + event.contact_data.media_streams.customer.audio.start_fragment_number + == "91343852333181432392682062622220590765191907586" + ) + assert event.contact_data.media_streams.customer.audio.start_timestamp == "1565781909613" + assert ( + event.contact_data.media_streams.customer.audio.stream_arn + == "arn:aws:kinesisvideo:eu-central-1:123456789012:stream/" + + "connect-contact-a3d73b84-ce0e-479a-a9dc-5637c9d30ac9/1565272947806" + ) + assert event.contact_data.previous_contact_id == "5ca32fbd-8f92-46af-92a5-6b0f970f0efe" + assert event.contact_data.queue is not None + assert ( + event.contact_data.queue.arn + == "arn:aws:connect:eu-central-1:123456789012:instance/9308c2a1-9bc6-4cea-8290-6c0b4a6d38fa/" + + "queue/5cba7cbf-1ecb-4b6d-b8bd-fe91079b3fc8" + ) + assert event.contact_data.queue.name == "QueueOne" + assert event.contact_data.system_endpoint is not None + assert event.contact_data.system_endpoint.address == "+11234567890" + assert event.contact_data.system_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.parameters == {"ParameterOne": "One", "ParameterTwo": "Two"} + + def test_dynamo_db_stream_trigger_event(): event = DynamoDBStreamEvent(load_event("dynamoStreamEvent.json")) diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index a3d471ab305..6386e76e42f 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -77,7 +77,7 @@ def metadata() -> Dict[str, str]: @pytest.fixture -def a_hundred_metrics(namespace=namespace) -> List[Dict[str, str]]: +def a_hundred_metrics() -> List[Dict[str, str]]: return [{"name": f"metric_{i}", "unit": "Count", "value": 1} for i in range(100)] @@ -257,9 +257,9 @@ def test_schema_validation_no_namespace(metric, dimension): # GIVEN we don't add any namespace # WHEN we attempt to serialize a valid EMF object # THEN it should fail namespace validation - with pytest.raises(SchemaValidationError, match=".*Namespace must be string"): - with single_metric(**metric): - pass + with pytest.raises(SchemaValidationError, match="Must contain a metric namespace."): + with single_metric(**metric) as my_metric: + my_metric.add_dimension(**dimension) def test_schema_validation_incorrect_metric_value(metric, dimension, namespace): @@ -268,7 +268,7 @@ def test_schema_validation_incorrect_metric_value(metric, dimension, namespace): # WHEN we attempt to serialize a valid EMF object # THEN it should fail validation and raise SchemaValidationError - with pytest.raises(MetricValueError): + with pytest.raises(MetricValueError, match=".*is not a valid number"): with single_metric(**metric): pass @@ -278,7 +278,7 @@ def test_schema_no_metrics(service, namespace): my_metrics = Metrics(service=service, namespace=namespace) # THEN it should fail validation and raise SchemaValidationError - with pytest.raises(SchemaValidationError, match=".*Metrics must contain at least 1 items"): + with pytest.raises(SchemaValidationError, match="Must contain at least one metric."): my_metrics.serialize_metric_set() @@ -288,7 +288,7 @@ def test_exceed_number_of_dimensions(metric, 
namespace):
 
     # WHEN we attempt to serialize them into a valid EMF object
     # THEN it should fail validation and raise SchemaValidationError
-    with pytest.raises(SchemaValidationError, match="must contain less than or equal to 9 items"):
+    with pytest.raises(SchemaValidationError, match="Maximum number of dimensions exceeded.*"):
         with single_metric(**metric, namespace=namespace) as my_metric:
             for dimension in dimensions:
                 my_metric.add_dimension(**dimension)
@@ -328,7 +328,7 @@ def lambda_handler(evt, context):
 
     # THEN the raised exception should be SchemaValidationError
     # and specifically about the lack of Metrics
-    with pytest.raises(SchemaValidationError, match=".*Metrics must contain at least 1 items"):
+    with pytest.raises(SchemaValidationError, match="Must contain at least one metric."):
         lambda_handler({}, {})
diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py
index 045b7fbbe18..5a915f574ae 100644
--- a/tests/functional/test_utilities_parameters.py
+++ b/tests/functional/test_utilities_parameters.py
@@ -1534,6 +1534,32 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
     assert str_value == json.dumps(mock_body_json)
 
 
+def test_appconf_get_app_config_new(monkeypatch, mock_name, mock_value):
+    # GIVEN
+    class TestProvider(BaseProvider):
+        def __init__(self, environment: str, application: str):
+            super().__init__()
+
+        def get(self, name: str, **kwargs) -> str:
+            return mock_value
+
+        def _get(self, name: str, **kwargs) -> str:
+            raise NotImplementedError()
+
+        def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
+            raise NotImplementedError()
+
+    monkeypatch.setattr(parameters.appconfig, "DEFAULT_PROVIDERS", {})
+    monkeypatch.setattr(parameters.appconfig, "AppConfigProvider", TestProvider)
+
+    # WHEN
+    value = parameters.get_app_config(mock_name, environment="dev", application="myapp")
+
+    # THEN
+    assert parameters.appconfig.DEFAULT_PROVIDERS["appconfig"] is not None
+    assert value == mock_value
+
+
 def test_transform_value_json(mock_value):
     """
     Test transform_value() with a json transform
diff --git a/tests/performance/__init__.py b/tests/performance/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py
new file mode 100644
index 00000000000..30cb371ca87
--- /dev/null
+++ b/tests/performance/conftest.py
@@ -0,0 +1,18 @@
+import time
+from contextlib import contextmanager
+from typing import Generator
+
+
+@contextmanager
+def timing() -> Generator:
+    """Generator to quickly time operations. It can add 5ms, so take that into account in elapsed time
+
+    Examples
+    --------
+
+        with timing() as t:
+            print("something")
+            elapsed = t()
+    """
+    start = time.perf_counter()
+    yield lambda: time.perf_counter() - start  # gen as lambda to calculate elapsed time
diff --git a/tests/performance/test_high_level_imports.py b/tests/performance/test_high_level_imports.py
new file mode 100644
index 00000000000..70a8d993bdf
--- /dev/null
+++ b/tests/performance/test_high_level_imports.py
@@ -0,0 +1,95 @@
+import importlib
+import time
+from contextlib import contextmanager
+from types import ModuleType
+from typing import Generator, Tuple
+
+import pytest
+
+LOGGER_INIT_SLA: float = 0.001
+METRICS_INIT_SLA: float = 0.005
+TRACER_INIT_SLA: float = 0.5
+IMPORT_INIT_SLA: float = 0.035
+
+
+@contextmanager
+def timing() -> Generator:
+    """Generator to quickly time operations. It can add 5ms, so take that into account in elapsed time
+
+    Examples
+    --------
+
+        with timing() as t:
+            print("something")
+            elapsed = t()
+    """
+    start = time.perf_counter()
+    yield lambda: time.perf_counter() - start  # gen as lambda to calculate elapsed time
+
+
+def core_utilities() -> Tuple[ModuleType, ModuleType, ModuleType]:
+    """Return Tracing, Logging, and Metrics module"""
+    tracing = importlib.import_module("aws_lambda_powertools.tracing")
+    logging = importlib.import_module("aws_lambda_powertools.logging")
+    metrics = importlib.import_module("aws_lambda_powertools.metrics")
+
+    return tracing, logging, metrics
+
+
+@pytest.mark.perf
+def test_import_times_ceiling():
+    # GIVEN Core utilities are imported
+    # WHEN none are used
+    # THEN import and any global initialization perf should be below 30ms
+    # though we adjust to 35ms to take into account different CI machines, etc.
+    # instead of re-running tests which can lead to false positives
+    with timing() as t:
+        core_utilities()
+
+    elapsed = t()
+    if elapsed > IMPORT_INIT_SLA:
+        pytest.fail(f"High level imports should be below 35ms: {elapsed}")
+
+
+@pytest.mark.perf
+def test_tracer_init():
+    # GIVEN Tracer is initialized
+    # WHEN default options are used
+    # THEN initialization X-Ray SDK perf should be below 450ms
+    # though we adjust to 500ms to take into account different CI machines, etc.
+    # instead of re-running tests which can lead to false positives
+    with timing() as t:
+        tracing, _, _ = core_utilities()
+        tracing.Tracer(disabled=True)  # boto3 takes ~200ms, and remaining is X-Ray SDK init
+
+    elapsed = t()
+    if elapsed > TRACER_INIT_SLA:
+        pytest.fail(f"Tracer initialization should be below 500ms: {elapsed}")
+
+
+@pytest.mark.perf
+def test_metrics_init():
+    # GIVEN Metrics is initialized
+    # WHEN default options are used
+    # THEN initialization perf should be below 5ms
+    with timing() as t:
+        _, _, metrics = core_utilities()
+        metrics.Metrics()
+
+    elapsed = t()
+    if elapsed > METRICS_INIT_SLA:
+        pytest.fail(f"Metrics initialization should be below 5ms: {elapsed}")
+
+
+@pytest.mark.perf
+def test_logger_init():
+    # GIVEN Logger is initialized
+    # WHEN default options are used
+    # THEN initialization perf should be below 1ms
+    with timing() as t:
+        _, logging, _ = core_utilities()
+        logging.Logger()
+
+    elapsed = t()
+    if elapsed > LOGGER_INIT_SLA:
+        pytest.fail(f"Logger initialization should be below 1ms: {elapsed}")
diff --git a/tests/performance/test_metrics.py b/tests/performance/test_metrics.py
new file mode 100644
index 00000000000..e01a11a5573
--- /dev/null
+++ b/tests/performance/test_metrics.py
@@ -0,0 +1,87 @@
+import json
+import time
+from contextlib import contextmanager
+from typing import Dict, Generator
+
+import pytest
+
+from aws_lambda_powertools import Metrics
+from aws_lambda_powertools.metrics import MetricUnit
+from aws_lambda_powertools.metrics import metrics as metrics_global
+
+# adjusted for slower machines in CI too
+METRICS_VALIDATION_SLA: float = 0.0019
+METRICS_SERIALIZATION_SLA: float = 0.0019
+
+
+@contextmanager
+def timing() -> Generator:
+    """Generator to quickly time operations. It can add 5ms, so take that into account in elapsed time
diff --git a/tests/performance/test_metrics.py b/tests/performance/test_metrics.py
new file mode 100644
index 00000000000..e01a11a5573
--- /dev/null
+++ b/tests/performance/test_metrics.py
@@ -0,0 +1,87 @@
+import json
+import time
+from contextlib import contextmanager
+from typing import Dict, Generator
+
+import pytest
+
+from aws_lambda_powertools import Metrics
+from aws_lambda_powertools.metrics import MetricUnit
+from aws_lambda_powertools.metrics import metrics as metrics_global
+
+# adjusted for slower machines in CI too
+METRICS_VALIDATION_SLA: float = 0.0019
+METRICS_SERIALIZATION_SLA: float = 0.0019
+
+
+@contextmanager
+def timing() -> Generator:
+    """Generator to quickly time operations. It can add ~5ms, so take that into account in the elapsed time.
+
+    Examples
+    --------
+
+        with timing() as t:
+            print("something")
+            elapsed = t()
+    """
+    start = time.perf_counter()
+    yield lambda: time.perf_counter() - start  # yield a lambda so callers can compute elapsed time on demand
+
+
+@pytest.fixture(scope="function", autouse=True)
+def reset_metric_set():
+    metrics = Metrics()
+    metrics.clear_metrics()
+    metrics_global.is_cold_start = True  # ensure each test has cold start
+    yield
+
+
+@pytest.fixture
+def namespace() -> str:
+    return "test_namespace"
+
+
+@pytest.fixture
+def metric() -> Dict[str, str]:
+    return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1}
+
+
+def add_max_metrics_before_serialization(metrics_instance: Metrics):
+    metrics_instance.add_dimension(name="test_dimension", value="test")
+
+    for i in range(99):
+        metrics_instance.add_metric(name=f"metric_{i}", unit="Count", value=1)
+
+
+@pytest.mark.perf
+def test_metrics_large_operation_without_json_serialization_sla(namespace):
+    # GIVEN Metrics is initialized
+    my_metrics = Metrics(namespace=namespace)
+
+    # WHEN we add and serialize 99 metrics
+    with timing() as t:
+        add_max_metrics_before_serialization(metrics_instance=my_metrics)
+        my_metrics.serialize_metric_set()
+
+    # THEN completion time should be below our validation SLA
+    elapsed = t()
+    if elapsed > METRICS_VALIDATION_SLA:
+        pytest.fail(f"Metric validation should be below {METRICS_VALIDATION_SLA}s: {elapsed}")
+
+
+@pytest.mark.perf
+def test_metrics_large_operation_and_json_serialization_sla(namespace):
+    # GIVEN Metrics is initialized
+    my_metrics = Metrics(namespace=namespace)
+
+    # WHEN we add 99 metrics, serialize them, and dump them to JSON
+    with timing() as t:
+        add_max_metrics_before_serialization(metrics_instance=my_metrics)
+        metrics = my_metrics.serialize_metric_set()
+        print(json.dumps(metrics, separators=(",", ":")))
+
+    # THEN completion time should be below our serialization SLA
+    elapsed = t()
+    if elapsed > METRICS_SERIALIZATION_SLA:
+        pytest.fail(f"Metric serialization should be below {METRICS_SERIALIZATION_SLA}s: {elapsed}")
diff --git a/tests/unit/test_json_encoder.py b/tests/unit/test_json_encoder.py
new file mode 100644
index 00000000000..af8de4257a8
--- /dev/null
+++ b/tests/unit/test_json_encoder.py
@@ -0,0 +1,24 @@
+import decimal
+import json
+
+import pytest
+
+from aws_lambda_powertools.shared.json_encoder import Encoder
+
+
+def test_jsonencode_decimal():
+    result = json.dumps({"val": decimal.Decimal("8.5")}, cls=Encoder)
+    assert result == '{"val": "8.5"}'
+
+
+def test_jsonencode_decimal_nan():
+    result = json.dumps({"val": decimal.Decimal("NaN")}, cls=Encoder)
+    assert result == '{"val": NaN}'
+
+
+def test_jsonencode_calls_default():
+    class CustomClass:
+        pass
+
+    with pytest.raises(TypeError):
+        json.dumps({"val": CustomClass()}, cls=Encoder)
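The encoder tests pin down two behaviors: `Decimal` values serialize as strings, while `Decimal("NaN")` falls back to JSON's bare `NaN` literal, and anything else defers to the standard `TypeError` path. A minimal sketch of an encoder with those semantics (an illustration of the technique, not the library's actual implementation):

```python
import decimal
import json
import math


class DecimalEncoder(json.JSONEncoder):
    """Serialize Decimal as a string; map Decimal NaN to float NaN so json emits bare NaN."""

    def default(self, obj):
        if isinstance(obj, decimal.Decimal):
            if obj.is_nan():
                return math.nan
            return str(obj)
        return super().default(obj)  # raises TypeError for unsupported types


print(json.dumps({"val": decimal.Decimal("8.5")}, cls=DecimalEncoder))  # {"val": "8.5"}
print(json.dumps({"val": decimal.Decimal("NaN")}, cls=DecimalEncoder))  # {"val": NaN}
```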
diff --git a/tests/unit/test_lru_cache.py b/tests/unit/test_lru_cache.py
new file mode 100644
index 00000000000..887e20d6270
--- /dev/null
+++ b/tests/unit/test_lru_cache.py
@@ -0,0 +1,80 @@
+import random
+
+import pytest
+
+from aws_lambda_powertools.shared.cache_dict import LRUDict
+
+MAX_CACHE_ITEMS = 50
+PREFILL_CACHE_ITEMS = 50
+
+
+@pytest.fixture
+def populated_cache():
+    cache_dict = LRUDict(max_items=MAX_CACHE_ITEMS, **{f"key_{i}": f"val_{i}" for i in range(0, PREFILL_CACHE_ITEMS)})
+    return cache_dict
+
+
+def test_cache_order_init(populated_cache):
+    first_item = list(populated_cache)[0]
+    last_item = list(populated_cache)[-1]
+
+    assert first_item == "key_0"
+    assert last_item == f"key_{MAX_CACHE_ITEMS - 1}"
+
+
+def test_cache_order_getitem(populated_cache):
+    random_value = random.randrange(0, MAX_CACHE_ITEMS)
+    _ = populated_cache[f"key_{random_value}"]
+
+    last_item = list(populated_cache)[-1]
+
+    assert last_item == f"key_{random_value}"
+
+
+def test_cache_order_get(populated_cache):
+    random_value = random.randrange(0, MAX_CACHE_ITEMS)
+    _ = populated_cache.get(f"key_{random_value}")
+
+    last_item = list(populated_cache)[-1]
+
+    assert last_item == f"key_{random_value}"
+
+
+def test_cache_evict_over_max_items(populated_cache):
+    assert "key_0" in populated_cache
+    assert len(populated_cache) == MAX_CACHE_ITEMS
+    populated_cache["new_item"] = "new_value"
+    assert len(populated_cache) == MAX_CACHE_ITEMS
+    assert "key_0" not in populated_cache
+    assert "key_1" in populated_cache
+
+
+def test_setitem_moves_to_end(populated_cache):
+    random_value = random.randrange(0, MAX_CACHE_ITEMS)
+    populated_cache[f"key_{random_value}"] = f"new_val_{random_value}"
+    last_item = list(populated_cache)[-1]
+
+    assert last_item == f"key_{random_value}"
+    assert populated_cache[f"key_{random_value}"] == f"new_val_{random_value}"
+
+
+def test_lru_pop_failing():
+    # GitHub #300: LRUDict.pop currently raises KeyError even when a default is given;
+    # this test pins that known behavior until the bug is fixed
+    cache = LRUDict()
+    key = "test"
+    cache[key] = "value"
+    with pytest.raises(KeyError) as excinfo:
+        cache.pop(key, None)
+    assert excinfo.value.args[0] == key
+
+
+def test_lru_del():
+    cache = LRUDict()
+    key = "test"
+    cache[key] = "value"
+    assert len(cache) == 1
+    if key in cache:
+        del cache[key]
+    assert key not in cache
+    assert len(cache) == 0
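For context on what these tests exercise: a dict that refreshes a key's recency on reads and writes, and evicts the least recently used entry once it grows past `max_items`. A minimal sketch built on `OrderedDict`, with the behavior inferred from the tests above (not the library's exact code):

```python
from collections import OrderedDict


class SimpleLRUDict(OrderedDict):
    """Dict that moves keys to the end on access/update and evicts the oldest beyond max_items."""

    def __init__(self, max_items: int = 1024, *args, **kwargs):
        self.max_items = max_items
        super().__init__(*args, **kwargs)

    def __getitem__(self, key):
        value = super().__getitem__(key)
        self.move_to_end(key)  # mark as most recently used
        return value

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self.move_to_end(key)
        if len(self) > self.max_items:
            self.popitem(last=False)  # evict the least recently used entry

    def get(self, key, default=None):
        # route through __getitem__ so get() also refreshes recency
        try:
            return self[key]
        except KeyError:
            return default


cache = SimpleLRUDict(max_items=2)
cache["a"], cache["b"] = 1, 2
_ = cache["a"]   # "a" becomes most recently used
cache["c"] = 3   # evicts "b", the least recently used
assert list(cache) == ["a", "c"]
```

Routing `get()` through `__getitem__` is what makes `test_cache_order_get` pass alongside `test_cache_order_getitem`: both access paths must update recency, or eviction order drifts from actual usage.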