diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..beaf71f1 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +layout pipenv diff --git a/.gitignore b/.gitignore index a65824ab..d5377ab7 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,8 @@ var/ *.egg-info/ .installed.cfg *.egg +requirements-dev.txt +requirements.txt # PyInstaller # Usually these files are written by a python script from a template @@ -60,3 +62,9 @@ target/ # Jetbrains/PyCharm project files .idea/ + +# vim swap files +.*.sw? +aws_lambda/.DS_Store +.DS_Store +.vscode/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e92c7125..ce8cfc67 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,62 @@ -- repo: https://github.com/pre-commit/pre-commit-hooks - sha: v0.9.5 +repos: + - repo: 'https://github.com/pre-commit/pre-commit-hooks' + rev: v2.4.0 hooks: - - id: autopep8-wrapper - - id: check-ast - - id: check-case-conflict - - id: check-merge-conflict - - id: double-quote-string-fixer - - id: end-of-file-fixer - - id: flake8 - - id: requirements-txt-fixer - - id: trailing-whitespace - - id: fix-encoding-pragma - - id: debug-statements -- repo: https://github.com/asottile/reorder_python_imports - sha: v0.3.5 + - id: pretty-format-json + name: 'Pretty format JSON' + args: + - '--no-sort-keys' + - '--autofix' + - '--indent=2' + - id: trailing-whitespace + name: 'Fix trailing whitespace' + exclude: setup.cfg + - id: end-of-file-fixer + name: 'Fix missing EOF' + exclude: setup.cfg + - id: check-executables-have-shebangs + name: 'Check exeutables for shebangs' + - id: check-merge-conflict + name: 'Check for merge conflict fragments' + - id: check-case-conflict + name: 'Check for filesystem character case conflicts' + - id: detect-private-key + name: 'Check for cleartext private keys stored' + - id: flake8 + additional_dependencies: + - flake8-mutable + - flake8-type-annotations + - flake8-eradicate + - flake8-bugbear + name: 'Check for Python style 
guideline violations' + - id: check-json + name: 'Validate JSON' + - id: check-ast + name: 'Check Python abstract syntax tree' + - repo: 'https://github.com/asottile/reorder_python_imports' + rev: v1.8.0 hooks: - - id: reorder-python-imports - language_version: python3.6 -- repo: https://github.com/Lucas-C/pre-commit-hooks-safety - sha: v1.1.0 + - id: reorder-python-imports + name: 'Reorder Python imports' + - repo: 'https://github.com/pre-commit/mirrors-autopep8' + rev: v1.4.4 hooks: - - id: python-safety-dependencies-check -- repo: https://github.com/asottile/add-trailing-comma - sha: v0.6.4 + - id: autopep8 + name: 'Pretty format Python' + args: + - '--in-place' + - '--aggressive' + - '--aggressive' + - '--experimental' + - '--remove-all-unused-imports' + - '--ignore-init-module-imports' + - '--remove-unused-variable' + - '--ignore E231' + - repo: https://github.com/psf/black + rev: stable hooks: - - id: add-trailing-comma + - id: black + name: 'Ruthlessly format Python' + language_version: python3.7 + args: + - '--line-length=79' diff --git a/.travis.yml b/.travis.yml index 81a8e342..a06a683f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,17 @@ language: python +cache: + - apt + - pip python: - - "2.6" - - "2.7" - - "3.3" - - "3.4" - - "3.5" - "3.6" -install: - - pip install -r requirements.txt - - pip install -r tests/dev_requirements.txt -script: pytest + - "3.7" + - "3.8" +before_install: + - pip install --upgrade pip + - pip install pipenv --upgrade +script: + - pipenv install --dev + - pytest +sudo: false +notifications: + slack: watchcloud:rNoT5kJJakPqwLSKuev6oa4C diff --git a/MANIFEST.in b/MANIFEST.in index f65748af..3a8f5aa3 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,11 +1,11 @@ -include HISTORY.rst +include requirements.txt include LICENSE -include README.rst +include README.md include python/boto/endpoints.json +include *.txt include aws_lambda/project_template/config.yaml recursive-include tests *.json *.py *.txt *.yaml 
recursive-include templates * recursive-exclude * __pycache__ -recursive-exclude * *.py[co] -recursive-include docs *.rst conf.py Makefile make.bat +recursive-include docs *.md conf.py Makefile make.bat diff --git a/Makefile b/Makefile index ed7320a4..3a8f3410 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,35 @@ -help: - @echo "clean - remove all build, test, coverage and Python artifacts" - @echo "lint - check style with flake8" - @echo "release - package and upload a release" - @echo "install - install the package to the active Python's site-packages" +dev: + pipenv install --dev -clean: clean-build clean-pyc clean-merge +pipenv: + pip install pipenv + pipenv install --dev + +deploy-patch: clean requirements bumpversion-patch upload clean + +deploy-minor: clean requirements bumpversion-minor upload clean + +deploy-major: clean requirements bumpversion-major upload clean + +requirements: + pipenv_to_requirements + +bumpversion-patch: + bumpversion patch + git push + git push --tags + +bumpversion-minor: + bumpversion minor + git push + git push --tags + +bumpversion-major: + bumpversion major + git push + git push --tags + +clean: clean-build clean-pyc clean-build: rm -fr build/ @@ -12,22 +37,17 @@ clean-build: rm -fr .eggs/ find . -name '*.egg-info' -exec rm -fr {} + find . -name '*.egg' -exec rm -f {} + + find . -name '*.DS_Store' -exec rm -f {} + + rm -f requirements.* clean-pyc: find . -name '*.pyc' -exec rm -f {} + find . -name '*.pyo' -exec rm -f {} + find . -name '*~' -exec rm -f {} + find . -name '__pycache__' -exec rm -fr {} + + find . -name '.pytest_cache' -exec rm -fr {} + + find . -name '.mypy_cache' -exec rm -fr {} + -clean-merge: - find . 
-name '*.orig' -exec rm -f {} + - -lint: - flake8 python-lambda tests - -release: clean - python setup.py sdist upload - python setup.py bdist_wheel upload - -install: clean - python setup.py install +upload: + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/Pipfile b/Pipfile new file mode 100644 index 00000000..abd01c64 --- /dev/null +++ b/Pipfile @@ -0,0 +1,28 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +twine = "*" +flake8 = "*" +black = "*" +bumpversion = "*" +pipenv-to-requirements = "*" +wheel = "*" +pytest = "*" +codecov = "*" +coveralls = "*" +pytest-cov = "*" + +[packages] +boto3 = ">=1.4.4" +click = "==7.1.2" +docutils = "*" +jmespath = "==0.10.0" +pyaml = "==20.4.0" +python-dateutil = "==2.8.1" +PyYAML = "==5.3.1" + +[pipenv] +allow_prereleases = true diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 00000000..c9c3a70b --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,724 @@ +{ + "_meta": { + "hash": { + "sha256": "7013bd722325666a778f221f5ee2957a554771203428b2a50c40fe07436a04dd" + }, + "pipfile-spec": 6, + "requires": {}, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:7b59ca28e17256ccd900c9a8c02061ce538a24b44b4d816e803e6431f8550ee0", + "sha256:e6915ac164a05367787db2416eeaa8662ae5e0b9c53902dbf23b407686ef5028" + ], + "index": "pypi", + "version": "==1.16.48" + }, + "botocore": { + "hashes": [ + "sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21", + "sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92" + ], + "version": "==1.19.63" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "index": "pypi", + "version": "==7.1.2" + }, + "docutils": { + "hashes": [ + 
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", + "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + ], + "index": "pypi", + "version": "==0.16" + }, + "jmespath": { + "hashes": [ + "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", + "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" + ], + "index": "pypi", + "version": "==0.10.0" + }, + "pyaml": { + "hashes": [ + "sha256:29a5c2a68660a799103d6949167bd6c7953d031449d08802386372de1db6ad71", + "sha256:67081749a82b72c45e5f7f812ee3a14a03b3f5c25ff36ec3b290514f8c4c4b99" + ], + "index": "pypi", + "version": "==20.4.0" + }, + "python-dateutil": { + "hashes": [ + "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", + "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" + ], + "index": "pypi", + "version": "==2.8.1" + }, + "pyyaml": { + "hashes": [ + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + ], + "index": "pypi", + "version": "==5.3.1" + }, + 
"s3transfer": { + "hashes": [ + "sha256:35627b86af8ff97e7ac27975fe0a98a312814b46c6333d8a6b889627bcd80994", + "sha256:efa5bd92a897b6a8d5c1383828dca3d52d0790e0756d49740563a3fb6ed03246" + ], + "version": "==0.3.7" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "version": "==1.16.0" + }, + "urllib3": { + "hashes": [ + "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", + "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" + ], + "markers": "python_version != '3.4'", + "version": "==1.26.5" + } + }, + "develop": { + "appdirs": { + "hashes": [ + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" + ], + "version": "==1.4.4" + }, + "attrs": { + "hashes": [ + "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", + "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + ], + "version": "==21.2.0" + }, + "black": { + "hashes": [ + "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea" + ], + "index": "pypi", + "version": "==20.8b1" + }, + "bleach": { + "hashes": [ + "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", + "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" + ], + "version": "==3.3.0" + }, + "bump2version": { + "hashes": [ + "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410", + "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6" + ], + "version": "==1.0.1" + }, + "bumpversion": { + "hashes": [ + "sha256:4ba55e4080d373f80177b4dabef146c07ce73c7d1377aabf9d3c3ae1f94584a6", + "sha256:4eb3267a38194d09f048a2179980bb4803701969bff2c85fa8f6d1ce050be15e" + ], + "index": "pypi", + "version": "==0.6.0" + }, + "certifi": { + "hashes": [ + 
"sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", + "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" + ], + "version": "==2021.5.30" + }, + "cffi": { + "hashes": [ + "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", + "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373", + "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69", + "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f", + "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", + "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05", + "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", + "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", + "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0", + "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", + "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7", + "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f", + "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", + "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", + "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76", + "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", + "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", + "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed", + "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", + "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", + "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", + "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", + "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", + 
"sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", + "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", + "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55", + "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", + "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", + "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", + "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", + "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", + "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", + "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", + "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", + "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", + "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", + "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", + "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", + "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", + "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", + "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", + "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", + "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", + "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc", + "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", + "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", + "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333", + "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", + "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" 
+ ], + "version": "==1.14.5" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "version": "==4.0.0" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "index": "pypi", + "version": "==7.1.2" + }, + "codecov": { + "hashes": [ + "sha256:6cde272454009d27355f9434f4e49f238c0273b216beda8472a65dc4957f473b", + "sha256:ba8553a82942ce37d4da92b70ffd6d54cf635fc1793ab0a7dc3fecd6ebfb3df8" + ], + "index": "pypi", + "version": "==2.1.11" + }, + "colorama": { + "hashes": [ + "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", + "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" + ], + "version": "==0.4.4" + }, + "coverage": { + "hashes": [ + "sha256:00368e6328ebff76197fff5f4d5704b44098f89d8d99a67a349ad6674ec0b157", + "sha256:0389690e0a1c94e9a246dc3130355d70805e51ca509db1bf07fbde27efb33aa4", + "sha256:065d2181f44392893d37d0a4f9ff60b485d705f733356d0a2fb292a58c6f2e0f", + "sha256:082febdba717c769da92d5e19e14a659ebef6daab19b67fced304b7b8d2475e2", + "sha256:0a35ae0d590effb7cc96e7d6935ae2ab8a51526a111fbe0f12d1671aa9fdc377", + "sha256:142493f0400a0bd5acf03c52971229e937323c3e24c372800ae1c44a503e0921", + "sha256:186f53367a08e8d24cc534c7cbfa43a82d1618a48dec2e0c56e80577ec1888fe", + "sha256:2163a00bcd613e95e118c01ea2811f705fbbacf1904d657b24d306879e2303d3", + "sha256:24ecf342b1e23de259d81b3adc83578935babeb54f6950c9bd9534b12443a49c", + "sha256:2c24d3e09f433817ddd0cb2e8f82f8b42cd09a8ac558462fedf99be479ed4851", + "sha256:2d741575de4a13869c9d4a685235bacc897c94afd3703e2ad4fdc362f37e87da", + "sha256:305ca73c09dd84054a3a8f53d5b70e0325b5b303245d0b96ed505698dc7e8ea7", + "sha256:4bf1d0a390de707f8bfd49efdcdac9366ce77ed64cb35b344f58b1ec62517317", + 
"sha256:50d90d6b753debb7568621125aad4e5fb418e7bdcb0dba3fa6f4ee82994b35d4", + "sha256:5a2079bca21fa959608223b90cf2f95ce686a6497fb12bfaaa7bb24c3e298199", + "sha256:60c6d433f0357db7ed2a2a698fb75b8ce919ce547d6d6bc79c576e090f509768", + "sha256:66cfae29bccea703f02d8997f60d71e236c5a321588f5aa5a318bd88ca23dc0a", + "sha256:6d6fc990962559de1f3685eb3e365ca60f2e3257bfd145bf675c566b8ebb1944", + "sha256:703b126f3ad20c463b545e199c4da460695630da5fdfd949de6a6269b45eabab", + "sha256:730cee22c41852b90948343cdfd183db1e96a9de69fd4dabec3532c582afea68", + "sha256:7e4a16bde8a3b7424b2955130f5a6c29e741e7138fe05c5d9d72efc356076a80", + "sha256:801e8277958bc2e6cc1f2443a20a2a97f79583aa64524b130e1c0de44c287ca9", + "sha256:80baa69a78d5696c60b72dee44ac3d5ccf75ee82e84d018938ddf642d036a6a8", + "sha256:80c00ce9cef80afbf18d16cb3052f5601ba8d087501d829169eecb33c153346a", + "sha256:89db5a374d793344087732207ee15869549486b2148e3e2e6effe22146351fcd", + "sha256:917b98cc5725ea2e0b88c74d34182589a9be07092cb35b861ea9e74189174f71", + "sha256:9398f8fd89f6f260e94e57559df1885b8200b18312824b617a8789e0f5e7dc74", + "sha256:95b6f212bb0c7379f1f2f6e47c722fbdc7355d8b7488a68649e83dfa29522704", + "sha256:9f23313f3e494475581d46de3b8b6bdcf618ee1df412490e779a9aa0a6c72162", + "sha256:9f6f26e5b129bb0218aab30d368d6ead750517a457986f8854b1df4b4c318098", + "sha256:a502693c83a2c6558bc45b4c2dc01a00c9b99cb3cf846913438933a44af174fc", + "sha256:aa4999130a8e892fa9051edc18bf4daa0a2839d3f3de2dcfcbf0ae4619ee3b5e", + "sha256:b10be0b80784c1beb8061e5ce938d8511a182125de5fc695a60f0561b984d361", + "sha256:b1f7b23a606aaf2464eb81c23b5b20623e2ba44b4aaca6ea9bfe00e84a1a5264", + "sha256:b78c8d232d97dbc8ad3a3d94cc15fccabe9a331685d76d2e5cb5284acc4a5feb", + "sha256:b88fa862817035ad7921f2641c27a85dab12cc685ad3ef29c0caaf5b3d10a868", + "sha256:b93fb9137070899b5f10d6487724f4427b5945983a785e1e2f1102c5e175c516", + "sha256:b9639e16c1bc4eb8a78b3b30df4146bb78df5d52ba1b7454b634abd89aede6cc", + "sha256:baa3b6be365c97f80d92a397cb8963dcd9bc22d101b39784e77a9cad093812f8", 
+ "sha256:c06c5758bae454a49dc3e7917804b46c31bb4a72cedfc8e7b4f17a318b3de9d6", + "sha256:c544153709e93ea7e9edcefee72f5afcf484a9cb526067065f9419419f4a3694", + "sha256:c6c74260ba130f7c20a340e8f9e544b0941621641f53edcf69e4602e12c9f29e", + "sha256:d040615ff5c02ffd97ba9f0f73b9db34c09b8142fbfdd363b2a79fa6a554242c", + "sha256:d85774b1ac09ec1d958e63baa436cc4c90e2e910294847ba51dcc3ca3ca04a63", + "sha256:e508bb216eee8350e77b436f9f99c4f2d8335ecb51483f5ffd8bf5e84aaa56d1", + "sha256:ea1cb38b1a52392ebb4e93eaf4a44b3cfdec35cca3f78a9a599f27b7f27031e2", + "sha256:ec310e0029d530d1043f638b7a326b349884421572626bc2909408da7b0d03e5", + "sha256:ed04b79f53fa975660f1a598120c504a0f4529170eeaf0d823fcc1f06f4d2e0f", + "sha256:f4909ee1ddabed351f0fa55063a7dbe498001e2245a9602d9fb0fd74afecdca9", + "sha256:f49ae9e19737493911e7f8e551310f719f463e442ea1ec92fe0804c62066a7e8", + "sha256:f4c93e6102087dda4931fcd50fa4ad44e8e43e09419290c5f05cc2c690961ebf", + "sha256:fa1b639d85af4794cb20d7cfd4c5ae38e94a418b17a2318a1992b470fb68260d" + ], + "version": "==5.6b1" + }, + "coveralls": { + "hashes": [ + "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc", + "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617" + ], + "index": "pypi", + "version": "==2.2.0" + }, + "cryptography": { + "hashes": [ + "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d", + "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959", + "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6", + "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873", + "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2", + "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713", + "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1", + "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177", + "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250", + 
"sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca", + "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d", + "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9" + ], + "version": "==3.4.7" + }, + "distlib": { + "hashes": [ + "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736", + "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c" + ], + "version": "==0.3.2" + }, + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "version": "==0.6.2" + }, + "docutils": { + "hashes": [ + "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", + "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + ], + "index": "pypi", + "version": "==0.16" + }, + "filelock": { + "hashes": [ + "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", + "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" + ], + "version": "==3.0.12" + }, + "flake8": { + "hashes": [ + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + ], + "index": "pypi", + "version": "==3.8.4" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "version": "==2.10" + }, + "importlib-metadata": { + "hashes": [ + "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786", + "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5" + ], + "version": "==4.4.0" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "jeepney": { + "hashes": [ + 
"sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657", + "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae" + ], + "markers": "sys_platform == 'linux'", + "version": "==0.6.0" + }, + "keyring": { + "hashes": [ + "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", + "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48" + ], + "version": "==23.0.1" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "packaging": { + "hashes": [ + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + ], + "version": "==20.9" + }, + "pathspec": { + "hashes": [ + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + ], + "version": "==0.8.1" + }, + "pbr": { + "hashes": [ + "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd", + "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4" + ], + "version": "==5.6.0" + }, + "pipenv": { + "hashes": [ + "sha256:05958fadcd70b2de6a27542fcd2bd72dd5c59c6d35307fdac3e06361fb06e30e", + "sha256:d180f5be4775c552fd5e69ae18a9d6099d9dafb462efe54f11c72cb5f4d5e977" + ], + "version": "==2021.5.29" + }, + "pipenv-to-requirements": { + "hashes": [ + "sha256:1c18682a4ec70eb07261d2b558df3ee22ea00192663a1b98fd1e45e22946c163", + "sha256:cb70471a17a7d4658caffe989539413313d51df1b3a54838bcd7e7d3ab3fcc18" + ], + "index": "pypi", + "version": "==0.9.0" + }, + "pkginfo": 
{ + "hashes": [ + "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", + "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75" + ], + "version": "==1.7.0" + }, + "pluggy": { + "hashes": [ + "sha256:265a94bf44ca13662f12fcd1b074c14d4b269a712f051b6f644ef7e705d6735f", + "sha256:467f0219e89bb5061a8429c6fc5cf055fa3983a0e68e84a1d205046306b37d9e" + ], + "version": "==1.0.0.dev0" + }, + "py": { + "hashes": [ + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + ], + "version": "==1.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + ], + "version": "==2.6.0" + }, + "pycparser": { + "hashes": [ + "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", + "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" + ], + "version": "==2.20" + }, + "pyflakes": { + "hashes": [ + "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", + "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + ], + "version": "==2.2.0" + }, + "pygments": { + "hashes": [ + "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", + "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" + ], + "version": "==2.9.0" + }, + "pyparsing": { + "hashes": [ + "sha256:1c6409312ce2ce2997896af5756753778d5f1603666dba5587804f09ad82ed27", + "sha256:f4896b4cc085a1f8f8ae53a1a90db5a86b3825ff73eb974dffee3d9e701007f4" + ], + "version": "==3.0.0b2" + }, + "pytest": { + "hashes": [ + "sha256:1969f797a1a0dbd8ccf0fecc80262312729afea9c17f1d70ebf85c5e76c6f7c8", + "sha256:66e419b1899bc27346cb2c993e12c5e5e8daba9073c1fbce33b9807abc95c306" + ], + "index": "pypi", + "version": "==6.2.1" + }, + "pytest-cov": { + "hashes": [ + 
"sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a", + "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7" + ], + "index": "pypi", + "version": "==2.12.1" + }, + "readme-renderer": { + "hashes": [ + "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c", + "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db" + ], + "version": "==29.0" + }, + "regex": { + "hashes": [ + "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5", + "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79", + "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31", + "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500", + "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11", + "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14", + "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3", + "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439", + "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c", + "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82", + "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711", + "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093", + "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a", + "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb", + "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8", + "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17", + "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000", + "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d", + "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480", + "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc", + 
"sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0", + "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9", + "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765", + "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e", + "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a", + "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07", + "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f", + "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac", + "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7", + "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed", + "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968", + "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7", + "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2", + "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4", + "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87", + "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8", + "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10", + "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29", + "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605", + "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6", + "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042" + ], + "version": "==2021.4.4" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "version": "==2.25.1" + }, + "requests-toolbelt": { + "hashes": [ + "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", + 
"sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" + ], + "version": "==0.9.1" + }, + "rfc3986": { + "hashes": [ + "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", + "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" + ], + "version": "==1.5.0" + }, + "secretstorage": { + "hashes": [ + "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f", + "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195" + ], + "markers": "sys_platform == 'linux'", + "version": "==3.3.1" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "version": "==1.16.0" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "version": "==0.10.2" + }, + "tqdm": { + "hashes": [ + "sha256:736524215c690621b06fc89d0310a49822d75e599fcd0feb7cc742b98d692493", + "sha256:cd5791b5d7c3f2f1819efc81d36eb719a38e0906a7380365c556779f585ea042" + ], + "version": "==4.61.0" + }, + "twine": { + "hashes": [ + "sha256:2f6942ec2a17417e19d2dd372fc4faa424c87ee9ce49b4e20c427eb00a0f3f41", + "sha256:fcffa8fc37e8083a5be0728371f299598870ee1eccc94e9a25cef7b1dcfa8297" + ], + "index": "pypi", + "version": "==3.3.0" + }, + "typed-ast": { + "hashes": [ + "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", + "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", + "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", + "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", + "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", + "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", + 
"sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", + "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", + "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", + "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", + "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", + "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", + "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", + "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", + "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", + "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", + "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", + "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", + "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", + "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", + "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", + "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", + "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", + "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", + "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", + "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", + "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", + "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", + "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", + "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" + ], + "version": "==1.4.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + 
"sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "version": "==3.10.0.0" + }, + "urllib3": { + "hashes": [ + "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", + "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" + ], + "markers": "python_version != '3.4'", + "version": "==1.26.5" + }, + "virtualenv": { + "hashes": [ + "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467", + "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76" + ], + "version": "==20.4.7" + }, + "virtualenv-clone": { + "hashes": [ + "sha256:07e74418b7cc64f4fda987bf5bc71ebd59af27a7bc9e8a8ee9fd54b1f2390a27", + "sha256:665e48dd54c84b98b71a657acb49104c54e7652bce9c1c4f6c6976ed4c827a29" + ], + "version": "==0.5.4" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" + ], + "version": "==0.5.1" + }, + "wheel": { + "hashes": [ + "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e", + "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e" + ], + "index": "pypi", + "version": "==0.36.2" + }, + "zipp": { + "hashes": [ + "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", + "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" + ], + "version": "==3.4.1" + } + } +} diff --git a/README.md b/README.md new file mode 100644 index 00000000..98a844da --- /dev/null +++ b/README.md @@ -0,0 +1,243 @@ +
+

+ python-lambda logo +

+

+ pypi + pypi + +

+
+ +Python-lambda is a toolset for developing and deploying *serverless* Python code in AWS Lambda. + +# A call for contributors +With python-lambda and pytube both continuing to gain momentum, I'm calling for +contributors to help build out new features, review pull requests, fix bugs, +and maintain overall code quality. If you're interested, please email me at +nficano[at]gmail.com. + +# Description + +AWS Lambda is a service that allows you to write Python, Java, or Node.js code +that gets executed in response to events like http requests or files uploaded +to S3. + +Working with Lambda is relatively easy, but the process of bundling and +deploying your code is not as simple as it could be. + +The *Python-Lambda* library takes away the guess work of developing your +Python-Lambda services by providing you a toolset to streamline the annoying +parts. + +# Requirements + +* Python 2.7, >= 3.6 (At the time of writing this, these are the Python runtimes supported by AWS Lambda). +* Pip (\~8.1.1) +* Virtualenv (\~15.0.0) +* Virtualenvwrapper (\~4.7.1) + + +# Getting Started + +First, you must create an IAM Role on your AWS account called +``lambda_basic_execution`` with the ``LambdaBasicExecution`` policy attached. + +On your computer, create a new virtualenv and project folder. + +```bash +$ mkvirtualenv pylambda +(pylambda) $ mkdir pylambda +``` + +Next, download *Python-Lambda* using pip via pypi. + +```bash +(pylambda) $ pip install python-lambda +``` + +From your ``pylambda`` directory, run the following to bootstrap your project. + +```bash +(pylambda) $ lambda init +``` + +This will create the following files: ``event.json``, ``__init__.py``, +``service.py``, and ``config.yaml``. + +Let's begin by opening ``config.yaml`` in the text editor of your choice. For +the purpose of this tutorial, the only required information is +``aws_access_key_id`` and ``aws_secret_access_key``. You can find these by +logging into the AWS management console. 
+ +Next let's open ``service.py``, in here you'll find the following function: + +```python +def handler(event, context): + # Your code goes here! + e = event.get('e') + pi = event.get('pi') + return e + pi +``` + +This is the handler function; this is the function AWS Lambda will invoke in +response to an event. You will notice that in the sample code ``e`` and ``pi`` +are values in a ``dict``. AWS Lambda uses the ``event`` parameter to pass in +event data to the handler. + +So if, for example, your function is responding to an http request, ``event`` +will be the ``POST`` JSON data and if your function returns something, the +contents will be in your http response payload. + +Next let's open the ``event.json`` file: + +```json +{ + "pi": 3.14, + "e": 2.718 +} +``` +Here you'll find the values of ``e`` and ``pi`` that are being referenced in +the sample code. + +If you now try and run: + +```bash +(pylambda) $ lambda invoke -v +``` + +You will get: +```bash +# 5.858 +# execution time: 0.00000310s +# function execution timeout: 15s +``` + +As you probably put together, the ``lambda invoke`` command grabs the values +stored in the ``event.json`` file and passes them to your function. + +The ``event.json`` file should help you develop your Lambda service locally. +You can specify an alternate ``event.json`` file by passing the +``--event-file=.json`` argument to ``lambda invoke``. + +When you're ready to deploy your code to Lambda simply run: + +```bash +(pylambda) $ lambda deploy +``` + +The deploy script will evaluate your virtualenv and identify your project +dependencies. It will package these up along with your handler function to a +zip file that it then uploads to AWS Lambda. + +You can now log into the +[AWS Lambda management console](https://console.aws.amazon.com/lambda/) to +verify the code deployed successfully. + +### Wiring to an API endpoint + +If you're looking to develop a simple microservice you can easily wire your +function up to an http endpoint. 
+ +Begin by navigating to your [AWS Lambda management console](https://console.aws.amazon.com/lambda/) and +clicking on your function. Click the API Endpoints tab and click "Add API endpoint". + +Under API endpoint type select "API Gateway". + +Next change Method to ``POST`` and Security to "Open" and click submit (NOTE: +you should secure this for use in production, open security is used for demo +purposes). + +At last you need to change the return value of the function to comply with the +standard defined for the API Gateway endpoint, the function should now look +like this: + +``` +def handler(event, context): + # Your code goes here! + e = event.get('e') + pi = event.get('pi') + return { + "statusCode": 200, + "headers": { "Content-Type": "application/json"}, + "body": e + pi + } +``` + +Now try and run: + +```bash +$ curl --header "Content-Type:application/json" \ + --request POST \ + --data '{"pi": 3.14, "e": 2.718}' \ + https:// +# 5.8580000000000005 +``` + +### Environment Variables +Lambda functions support environment variables. In order to set environment +variables for your deployed code to use, you can configure them in +``config.yaml``. To load the value for the environment variable at the time of +deployment (instead of hard coding them in your configuration file), you can +use local environment values (see 'env3' in example code below). + +```yaml +environment_variables: + env1: foo + env2: baz + env3: ${LOCAL_ENVIRONMENT_VARIABLE_NAME} +``` + +This would create environment variables in the lambda instance upon deploy. If +your functions don't need environment variables, simply leave this section out +of your config. + +### Uploading to S3 +You may find that you do not need the toolkit to fully +deploy your Lambda or that your code bundle is too large to upload via the API. +You can use the ``upload`` command to send the bundle to an S3 bucket of your +choosing. 
Before doing this, you will need to set the following variables in +``config.yaml``: + +```yaml +role: basic_s3_upload +bucket_name: 'example-bucket' +s3_key_prefix: 'path/to/file/' +``` +Your role must have ``s3:PutObject`` permission on the bucket/key that you +specify for the upload to work properly. Once you have that set, you can +execute ``lambda upload`` to initiate the transfer. + +### Deploying via S3 +You can also choose to use S3 as your source for Lambda deployments. This can +be done by issuing ``lambda deploy-s3`` with the same variables/AWS permissions +you'd set for executing the ``upload`` command. + +## Development +Development of "python-lambda" is facilitated exclusively on GitHub. +Contributions in the form of patches, tests and feature creation and/or +requests are very welcome and highly encouraged. Please open an issue if this +tool does not function as you'd expect. + +### Environment Setup +1. [Install pipenv](https://github.com/pypa/pipenv) +2. [Install direnv](https://direnv.net/) +3. [Install Precommit](https://pre-commit.com/#install) (optional but preferred) +4. ``cd`` into the project and enter "direnv allow" when prompted. This will begin + installing all the development dependencies. +5. If you installed pre-commit, run ``pre-commit install`` inside the project + directory to set up the git hooks. + +### Releasing to Pypi +Once you've pushed your changes to master, run **one** of the following: + + ```sh + # If you're releasing a major version: + make deploy-major + + # If you're releasing a minor version: + make deploy-minor + +# If you're releasing a patch version: +make deploy-patch + ``` diff --git a/README.rst b/README.rst deleted file mode 100644 index 10b0739e..00000000 --- a/README.rst +++ /dev/null @@ -1,212 +0,0 @@ -======== -python-λ -======== - -.. image:: https://img.shields.io/pypi/v/python-lambda.svg - :alt: Pypi - :target: https://pypi.python.org/pypi/python-lambda/ - -.. 
image:: https://img.shields.io/pypi/pyversions/python-lambda.svg - :alt: Python Versions - :target: https://pypi.python.org/pypi/python-lambda/ - -Python-lambda is a toolset for developing and deploying *serverless* Python code in AWS Lambda. - -A call for contributors -======================= -With python-lambda and `pytube `_ both continuing to gain momentum, I'm calling for contributors to help build out new features, review pull requests, fix bugs, and maintain overall code quality. If you're interested, please email me at nficano[at]gmail.com. - -Description -=========== - -AWS Lambda is a service that allows you to write Python, Java, or Node.js code that gets executed in response to events like http requests or files uploaded to S3. - -Working with Lambda is relatively easy, but the process of bundling and deploying your code is not as simple as it could be. - -The *Python-Lambda* library takes away the guess work of developing your Python-Lambda services by providing you a toolset to streamline the annoying parts. - -Requirements -============ - -* Python 2.7 & 3.6 (At the time of writing this, AWS Lambda only supports Python 2.7/3.6). -* Pip (~8.1.1) -* Virtualenv (~15.0.0) -* Virtualenvwrapper (~4.7.1) - -Getting Started -=============== - -First, you must create an IAM Role on your AWS account called `lambda_basic_execution` with the `LambdaBasicExecution` policy attached. - -On your computer, create a new virtualenv and project folder. - -.. code:: bash - - $ mkvirtualenv pylambda - (pylambda) $ mkdir pylambda - -Next, download *Python-Lambda* using pip via pypi. - -.. code:: bash - - (pylambda) $ pip install python-lambda - -From your ``pylambda`` directory, run the following to bootstrap your project. - -.. code:: bash - - (pylambda) $ lambda init - -This will create the following files: ``event.json``, ``__init__.py``, ``service.py``, and ``config.yaml``. - -Let's begin by opening ``config.yaml`` in the text editor of your choice. 
For the purpose of this tutorial, the only required information is ``aws_access_key_id`` and ``aws_secret_access_key``. You can find these by logging into the AWS management console. - -Next let's open ``service.py``, in here you'll find the following function: - -.. code:: python - - def handler(event, context): - # Your code goes here! - e = event.get('e') - pi = event.get('pi') - return e + pi - - -This is the handler function; this is the function AWS Lambda will invoke in response to an event. You will notice that in the sample code ``e`` and ``pi`` are values in a ``dict``. AWS Lambda uses the ``event`` parameter to pass in event data to the handler. - -So if, for example, your function is responding to an http request, ``event`` will be the ``POST`` JSON data and if your function returns something, the contents will be in your http response payload. - -Next let's open the ``event.json`` file: - -.. code:: json - - { - "pi": 3.14, - "e": 2.718 - } - -Here you'll find the values of ``e`` and ``pi`` that are being referenced in the sample code. - -If you now try and run: - -.. code:: bash - - (pylambda) $ lambda invoke -v - -You will get: - -.. code:: bash - - # 5.858 - - # execution time: 0.00000310s - # function execution timeout: 15s - -As you probably put together, the ``lambda invoke`` command grabs the values stored in the ``event.json`` file and passes them to your function. - -The ``event.json`` file should help you develop your Lambda service locally. You can specify an alternate ``event.json`` file by passing the ``--event-file=.json`` argument to ``lambda invoke``. - -When you're ready to deploy your code to Lambda simply run: - -.. code:: bash - - (pylambda) $ lambda deploy - -The deploy script will evaluate your virtualenv and identify your project dependencies. It will package these up along with your handler function to a zip file that it then uploads to AWS Lambda. 
- -You can now log into the `AWS Lambda management console `_ to verify the code deployed successfully. - -Wiring to an API endpoint -========================= - -If you're looking to develop a simple microservice you can easily wire your function up to an http endpoint. - -Begin by navigating to your `AWS Lambda management console `_ and clicking on your function. Click the API Endpoints tab and click "Add API endpoint". - -Under API endpoint type select "API Gateway". - -Next change Method to ``POST`` and Security to "Open" and click submit (NOTE: you should secure this for use in production, open security is used for demo purposes). - -At last you need to change the return value of the function to comply with the standard defined for the API Gateway endpoint, the function should now look like this: - -.. code:: python - - def handler(event, context): - # Your code goes here! - e = event.get('e') - pi = event.get('pi') - return { - "statusCode": 200, - "headers": { "Content-Type": "application/json"}, - "body": e + pi - } - -Now try and run: - -.. code:: bash - - $ curl --header "Content-Type:application/json" \ - --request POST \ - --data '{"pi": 3.14, "e": 2.718}' \ - https:// - # 5.8580000000000005 - -Environment Variables -===================== -Lambda functions support environment variables. In order to set environment variables for your deployed code to use, you can configure them in ``config.yaml``. To load the -value for the environment variable at the time of deployment (instead of hard coding them in your configuration file), you can use local environment values (see 'env3' in example code below). - -.. code:: yaml - - environment_variables: - env1: foo - env2: baz - env3: ${LOCAL_ENVIRONMENT_VARIABLE_NAME} - -This would create environment variables in the lambda instance upon deploy. If your functions don't need environment variables, simply leave this section out of your config. 
- -Uploading to S3 -=============== -You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing. -Before doing this, you will need to set the following variables in ``config.yaml``: - -.. code:: yaml - - role: basic_s3_upload - bucket_name: 'example-bucket' - s3_key_prefix: 'path/to/file/' - -Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer. - -Deploying via S3 -=============== -You can also choose to use S3 as your source for Lambda deployments. This can be done by issuing ``lambda deploy_s3`` with the same variables/AWS permissions you'd set for executing the ``upload`` command. - -Development -=========== - -Development of "python-lambda" is facilitated exclusively on GitHub. Contributions in the form of patches, tests and feature creation and/or requests are very welcome and highly encouraged. Please open an issue if this tool does not function as you'd expect. - - -How to release updates ----------------------- - -If this is the first time you're releasing to pypi, you'll need to run: ``pip install -r tests/dev_requirements.txt``. - -Once complete, execute the following commands: - -.. code:: bash - - git checkout master - - # Increment the version number and tag the release. - bumpversion [major|minor|patch] - - # Upload the distribution to PyPi - python setup.py sdist bdist_wheel upload - - # Since master often contains work-in-progress changes, increment the version - # to a patch release to prevent inaccurate attribution. 
- bumpversion --no-tag patch - - git push origin master --tags diff --git a/artwork/python-lambda.svg b/artwork/python-lambda.svg new file mode 100644 index 00000000..0136f802 --- /dev/null +++ b/artwork/python-lambda.svg @@ -0,0 +1,27 @@ + + + + Artboard + Created with Sketch. + + + + + + + + + + + + + + + + + + + + + + diff --git a/aws_lambda/__init__.py b/aws_lambda/__init__.py old mode 100755 new mode 100644 index d151ac9a..35145b50 --- a/aws_lambda/__init__.py +++ b/aws_lambda/__init__.py @@ -1,18 +1,28 @@ -# -*- coding: utf-8 -*- # flake8: noqa -__author__ = 'Nick Ficano' -__email__ = 'nficano@gmail.com' -__version__ = '3.0.3' +__author__ = "Nick Ficano" +__email__ = "nficano@gmail.com" +__version__ = "11.8.0" -from .aws_lambda import deploy, deploy_s3, invoke, init, build, upload, cleanup_old_versions +from .aws_lambda import ( + deploy, + deploy_s3, + invoke, + init, + build, + upload, + cleanup_old_versions, +) # Set default logging handler to avoid "No handler found" warnings. 
import logging + try: # Python 2.7+ from logging import NullHandler except ImportError: + class NullHandler(logging.Handler): def emit(self, record): pass + logging.getLogger(__name__).addHandler(NullHandler()) diff --git a/aws_lambda/aws_lambda.py b/aws_lambda/aws_lambda.py old mode 100755 new mode 100644 index 44f37cc6..0b5ca884 --- a/aws_lambda/aws_lambda.py +++ b/aws_lambda/aws_lambda.py @@ -1,39 +1,58 @@ -# -*- coding: utf-8 -*- -from __future__ import print_function - import hashlib import json import logging import os +import subprocess import sys import time from collections import defaultdict -from imp import load_source + from shutil import copy from shutil import copyfile +from shutil import copystat from shutil import copytree from tempfile import mkdtemp import boto3 import botocore -import pip import yaml +import sys from .helpers import archive from .helpers import get_environment_variable_value +from .helpers import LambdaContext from .helpers import mkdir from .helpers import read from .helpers import timestamp ARN_PREFIXES = { - 'us-gov-west-1': 'aws-us-gov', + "cn-north-1": "aws-cn", + "cn-northwest-1": "aws-cn", + "us-gov-west-1": "aws-us-gov", } log = logging.getLogger(__name__) -def cleanup_old_versions(src, keep_last_versions, config_file='config.yaml'): +def load_source(module_name, module_path): + """Loads a python module from the path of the corresponding file.""" + + if sys.version_info[0] == 3 and sys.version_info[1] >= 5: + import importlib.util + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + elif sys.version_info[0] == 3 and sys.version_info[1] < 5: + import importlib.machinery + loader = importlib.machinery.SourceFileLoader(module_name, module_path) + module = loader.load_module() + return module + + +def cleanup_old_versions( + src, keep_last_versions, config_file="config.yaml", profile_name=None, +): """Deletes old deployed 
versions of the function in AWS Lambda. Won't delete $Latest and any aliased version @@ -48,39 +67,47 @@ def cleanup_old_versions(src, keep_last_versions, config_file='config.yaml'): print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) - aws_access_key_id = cfg.get('aws_access_key_id') - aws_secret_access_key = cfg.get('aws_secret_access_key') + profile_name = cfg.get("profile") + aws_access_key_id = cfg.get("aws_access_key_id") + aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( - 'lambda', aws_access_key_id, aws_secret_access_key, - cfg.get('region'), + "lambda", + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region"), ) response = client.list_versions_by_function( - FunctionName=cfg.get('function_name'), + FunctionName=cfg.get("function_name"), ) - versions = response.get('Versions') - if len(response.get('Versions')) < keep_last_versions: - print('Nothing to delete. (Too few versions published)') + versions = response.get("Versions") + if len(response.get("Versions")) < keep_last_versions: + print("Nothing to delete. 
(Too few versions published)") else: - version_numbers = [elem.get('Version') for elem in - versions[1:-keep_last_versions]] + version_numbers = [ + elem.get("Version") for elem in versions[1:-keep_last_versions] + ] for version_number in version_numbers: try: client.delete_function( - FunctionName=cfg.get('function_name'), + FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: - print('Skipping Version {}: {}' - .format(version_number, e.message)) + print(f"Skipping Version {version_number}: {e}") def deploy( - src, use_requirements=False, local_package=None, - config_file='config.yaml', + src, + requirements=None, + local_package=None, + config_file="config.yaml", + profile_name=None, + preserve_vpc=False, ): """Deploys a new function to AWS Lambda. @@ -93,26 +120,35 @@ def deploy( """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. 
path_to_zip_file = build( - src, config_file=config_file, - use_requirements=use_requirements, + src, + config_file=config_file, + requirements=requirements, local_package=local_package, ) - if function_exists(cfg, cfg.get('function_name')): - update_function(cfg, path_to_zip_file) + existing_config = get_function_config(cfg) + if existing_config: + update_function( + cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc + ) else: create_function(cfg, path_to_zip_file) def deploy_s3( - src, use_requirements=False, local_package=None, config_file='config.yaml', + src, + requirements=None, + local_package=None, + config_file="config.yaml", + profile_name=None, + preserve_vpc=False, ): """Deploys a new function via AWS S3. @@ -125,28 +161,41 @@ def deploy_s3( """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. 
path_to_zip_file = build( - src, config_file=config_file, use_requirements=use_requirements, + src, + config_file=config_file, + requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) - if function_exists(cfg, cfg.get('function_name')): - update_function(cfg, path_to_zip_file, use_s3, s3_file) + existing_config = get_function_config(cfg) + if existing_config: + update_function( + cfg, + path_to_zip_file, + existing_config, + use_s3=use_s3, + s3_file=s3_file, + preserve_vpc=preserve_vpc, + ) else: - create_function(cfg, path_to_zip_file, use_s3, s3_file) + create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file) def upload( - src, use_requirements=False, local_package=None, - config_file='config.yaml', + src, + requirements=None, + local_package=None, + config_file="config.yaml", + profile_name=None, ): """Uploads a new function to AWS S3. @@ -159,14 +208,16 @@ def upload( """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( - src, config_file=config_file, use_requirements=use_requirements, + src, + config_file=config_file, + requirements=requirements, local_package=local_package, ) @@ -174,7 +225,10 @@ def upload( def invoke( - src, event_file='event.json', config_file='config.yaml', + src, + event_file="event.json", + config_file="config.yaml", + profile_name=None, verbose=False, ): """Simulates a call to your function. @@ -189,11 +243,15 @@ def invoke( """ # Load and parse the config file. 
path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) + + # Set AWS_PROFILE environment variable based on `--profile` option. + if profile_name: + os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. - env_vars = cfg.get('environment_variables') + env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) @@ -208,22 +266,27 @@ def invoke( except ValueError: sys.path.append(src) - handler = cfg.get('handler') + handler = cfg.get("handler") # Inspect the handler string (.) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) - # TODO: look into mocking the ``context`` variable, currently being passed - # as None. + timeout = cfg.get("timeout") + if timeout: + context = LambdaContext(cfg.get("function_name"), timeout) + else: + context = LambdaContext(cfg.get("function_name")) start = time.time() - results = fn(event, None) + results = fn(event, context) end = time.time() - print('{0}'.format(results)) + print("{0}".format(results)) if verbose: - print('\nexecution time: {:.8f}s\nfunction execution ' - 'timeout: {:2}s'.format(end - start, cfg.get('timeout', 15))) + print( + "\nexecution time: {:.8f}s\nfunction execution " + "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) + ) def init(src, minimal=False): @@ -236,10 +299,10 @@ def init(src, minimal=False): """ templates_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), 'project_templates', + os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): - if (minimal and filename == 'event.json') or filename.endswith('.pyc'): + if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = 
os.path.join(templates_path, filename) @@ -248,7 +311,11 @@ def init(src, minimal=False): def build( - src, use_requirements=False, local_package=None, config_file='config.yaml', + src, + requirements=None, + local_package=None, + config_file="config.yaml", + profile_name=None, ): """Builds the file bundle. @@ -261,67 +328,65 @@ def build( """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) - cfg = read(path_to_config_file, loader=yaml.load) + cfg = read_cfg(path_to_config_file, profile_name) # Get the absolute path to the output directory and create it if it doesn't # already exist. - dist_directory = cfg.get('dist_directory', 'dist') + dist_directory = cfg.get("dist_directory", "dist") path_to_dist = os.path.join(src, dist_directory) mkdir(path_to_dist) # Combine the name of the Lambda function with the current timestamp to use # for the output filename. - function_name = cfg.get('function_name') - output_filename = '{0}-{1}.zip'.format(timestamp(), function_name) + function_name = cfg.get("function_name") + output_filename = "{0}-{1}.zip".format(timestamp(), function_name) - path_to_temp = mkdtemp(prefix='aws-lambda') + path_to_temp = mkdtemp(prefix="aws-lambda") pip_install_to_target( - path_to_temp, - use_requirements=use_requirements, - local_package=local_package, + path_to_temp, requirements=requirements, local_package=local_package, ) # Hack for Zope. - if 'zope' in os.listdir(path_to_temp): + if "zope" in os.listdir(path_to_temp): print( - 'Zope packages detected; fixing Zope package paths to ' - 'make them importable.', + "Zope packages detected; fixing Zope package paths to " + "make them importable.", ) # Touch. - with open(os.path.join(path_to_temp, 'zope/__init__.py'), 'wb'): + with open(os.path.join(path_to_temp, "zope/__init__.py"), "wb"): pass # Gracefully handle whether ".zip" was included in the filename or not. 
output_filename = ( - '{0}.zip'.format(output_filename) - if not output_filename.endswith('.zip') + "{0}.zip".format(output_filename) + if not output_filename.endswith(".zip") else output_filename ) # Allow definition of source code directories we want to build into our # zipped package. - build_config = defaultdict(**cfg.get('build', {})) - build_source_directories = build_config.get('source_directories', '') + build_config = defaultdict(**cfg.get("build", {})) + build_source_directories = build_config.get("source_directories", "") build_source_directories = ( build_source_directories if build_source_directories is not None - else '' + else "" ) source_directories = [ - d.strip() for d in build_source_directories.split(',') + d.strip() for d in build_source_directories.split(",") ] files = [] for filename in os.listdir(src): if os.path.isfile(filename): - if filename == '.DS_Store': + if filename == ".DS_Store": continue if filename == config_file: continue - print('Bundling: %r' % filename) + print("Bundling: %r" % filename) files.append(os.path.join(src, filename)) elif os.path.isdir(filename) and filename in source_directories: - print('Bundling directory: %r' % filename) + print("Bundling directory: %r" % filename) files.append(os.path.join(src, filename)) # "cd" into `temp_path` directory. @@ -332,18 +397,22 @@ def build( # Copy handler file into root of the packages folder. copyfile(f, os.path.join(path_to_temp, filename)) + copystat(f, os.path.join(path_to_temp, filename)) elif os.path.isdir(f): - destination_folder = os.path.join(path_to_temp, f[len(src) + 1:]) + src_path_length = len(src) + 1 + destination_folder = os.path.join( + path_to_temp, f[src_path_length:] + ) copytree(f, destination_folder) # Zip them together into a single file. # TODO: Delete temp directory created once the archive has been compiled. 
- path_to_zip_file = archive('./', path_to_dist, output_filename) + path_to_zip_file = archive("./", path_to_dist, output_filename) return path_to_zip_file def get_callable_handler_function(src, handler): - """Tranlate a string of the form "module.function" into a callable + """Translate a string of the form "module.function" into a callable function. :param str src: @@ -355,7 +424,7 @@ def get_callable_handler_function(src, handler): # "cd" into `src` directory. os.chdir(src) - module_name, function_name = handler.split('.') + module_name, function_name = handler.split(".") filename = get_handler_filename(handler) path_to_module_file = os.path.join(src, filename) @@ -369,8 +438,8 @@ def get_handler_filename(handler): :param str handler: A dot delimited string representing the `<module>.<function>`. - module_name, _ = handler.split('.') - return '{0}.py'.format(module_name) + module_name, _ = handler.split(".") + return "{0}.py".format(module_name) def _install_packages(path, packages): @@ -384,46 +453,65 @@ def _install_packages(path, packages): :param list packages: A list of packages to be installed via pip. 
""" + def _filter_blacklist(package): - blacklist = ['-i', '#', 'Python==', 'python-lambda=='] + blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist) + filtered_packages = filter(_filter_blacklist, packages) for package in filtered_packages: - if package.startswith('-e '): - package = package.replace('-e ', '') - - print('Installing {package}'.format(package=package)) - pip.main(['install', package, '-t', path, '--ignore-installed']) + if package.startswith("-e "): + package = package.replace("-e ", "") + + print("Installing {package}".format(package=package)) + subprocess.check_call( + [ + sys.executable, + "-m", + "pip", + "install", + package, + "-t", + path, + "--ignore-installed", + ] + ) + print( + "Install directory contents are now: {directory}".format( + directory=os.listdir(path) + ) + ) -def pip_install_to_target(path, use_requirements=False, local_package=None): +def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. - :param bool use_requirements: - If set, only the packages in the requirements.txt file are installed. - The requirements.txt file needs to be in the same directory as the - project which shall be deployed. - Defaults to false and installs all pacakges found via pip freeze if - not set. + :param str requirements: + If set, only the packages in the supplied requirements file are + installed. + If not set then installs all packages found via pip freeze. 
:param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] - if not use_requirements: - print('Gathering pip packages') - packages.extend(pip.operations.freeze.freeze()) + if not requirements: + print("Gathering pip packages") + pkgStr = subprocess.check_output( + [sys.executable, "-m", "pip", "freeze"] + ) + packages.extend(pkgStr.decode("utf-8").splitlines()) else: - if os.path.exists('requirements.txt'): - print('Gathering requirement packages') - data = read('requirements.txt') + if os.path.exists(requirements): + print("Gathering requirement packages") + data = read(requirements) packages.extend(data.splitlines()) if not packages: - print('No dependency packages installed!') + print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): @@ -435,231 +523,325 @@ def pip_install_to_target(path, use_requirements=False, local_package=None): def get_role_name(region, account_id, role): """Shortcut to insert the `account_id` and `role` into the iam string.""" - prefix = ARN_PREFIXES.get(region, 'aws') - return 'arn:{0}:iam::{1}:role/{2}'.format(prefix, account_id, role) + prefix = ARN_PREFIXES.get(region, "aws") + return "arn:{0}:iam::{1}:role/{2}".format(prefix, account_id, role) -def get_account_id(aws_access_key_id, aws_secret_access_key, region=None): +def get_account_id( + profile_name, aws_access_key_id, aws_secret_access_key, region=None, +): """Query STS for a users' account_id""" client = get_client( - 'sts', aws_access_key_id, aws_secret_access_key, - region, + "sts", profile_name, aws_access_key_id, aws_secret_access_key, region, ) - return client.get_caller_identity().get('Account') + return client.get_caller_identity().get("Account") -def get_client(client, aws_access_key_id, aws_secret_access_key, region=None): +def get_client( + client, + profile_name, + aws_access_key_id, + aws_secret_access_key, + 
region=None, +): """Shortcut for getting an initialized instance of the boto3 client.""" - return boto3.client( - client, + boto3.setup_default_session( + profile_name=profile_name, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=region, ) + return boto3.client(client) -def create_function(cfg, path_to_zip_file, *use_s3, **s3_file): +def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None): """Register and upload a function to AWS Lambda.""" - print('Creating your new Lambda function') + print("Creating your new Lambda function") byte_stream = read(path_to_zip_file, binary_file=True) - aws_access_key_id = cfg.get('aws_access_key_id') - aws_secret_access_key = cfg.get('aws_secret_access_key') + profile_name = cfg.get("profile") + aws_access_key_id = cfg.get("aws_access_key_id") + aws_secret_access_key = cfg.get("aws_secret_access_key") account_id = get_account_id( - aws_access_key_id, aws_secret_access_key, cfg.get('region'), + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region",), ) role = get_role_name( - cfg.get('region'), account_id, - cfg.get('role', 'lambda_basic_execution'), + cfg.get("region"), + account_id, + cfg.get("role", "lambda_basic_execution"), ) client = get_client( - 'lambda', aws_access_key_id, aws_secret_access_key, - cfg.get('region'), + "lambda", + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region"), ) # Do we prefer development variable over config? 
- buck_name = ( - os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name') - ) - func_name = ( - os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name') + buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") + func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get( + "function_name" ) - print('Creating lambda function with name: {}'.format(func_name)) + print("Creating lambda function with name: {}".format(func_name)) if use_s3: kwargs = { - 'FunctionName': func_name, - 'Runtime': cfg.get('runtime', 'python2.7'), - 'Role': role, - 'Handler': cfg.get('handler'), - 'Code': { - 'S3Bucket': '{}'.format(buck_name), - 'S3Key': '{}'.format(s3_file), + "FunctionName": func_name, + "Runtime": cfg.get("runtime", "python2.7"), + "Role": role, + "Handler": cfg.get("handler"), + "Code": { + "S3Bucket": "{}".format(buck_name), + "S3Key": "{}".format(s3_file), + }, + "Description": cfg.get("description", ""), + "Timeout": cfg.get("timeout", 15), + "MemorySize": cfg.get("memory_size", 512), + "VpcConfig": { + "SubnetIds": cfg.get("subnet_ids", []), + "SecurityGroupIds": cfg.get("security_group_ids", []), }, - 'Description': cfg.get('description'), - 'Timeout': cfg.get('timeout', 15), - 'MemorySize': cfg.get('memory_size', 512), - 'Publish': True, + "Publish": True, } else: kwargs = { - 'FunctionName': func_name, - 'Runtime': cfg.get('runtime', 'python2.7'), - 'Role': role, - 'Handler': cfg.get('handler'), - 'Code': {'ZipFile': byte_stream}, - 'Description': cfg.get('description'), - 'Timeout': cfg.get('timeout', 15), - 'MemorySize': cfg.get('memory_size', 512), - 'Publish': True, + "FunctionName": func_name, + "Runtime": cfg.get("runtime", "python2.7"), + "Role": role, + "Handler": cfg.get("handler"), + "Code": {"ZipFile": byte_stream}, + "Description": cfg.get("description", ""), + "Timeout": cfg.get("timeout", 15), + "MemorySize": cfg.get("memory_size", 512), + "VpcConfig": { + "SubnetIds": cfg.get("subnet_ids", []), + "SecurityGroupIds": 
cfg.get("security_group_ids", []), + }, + "Publish": True, } - if 'environment_variables' in cfg: + if "tags" in cfg: + kwargs.update( + Tags={key: str(value) for key, value in cfg.get("tags").items()} + ) + + if "environment_variables" in cfg: kwargs.update( Environment={ - 'Variables': { + "Variables": { key: get_environment_variable_value(value) - for key, value - in cfg.get('environment_variables').items() + for key, value in cfg.get("environment_variables").items() }, }, ) client.create_function(**kwargs) + concurrency = get_concurrency(cfg) + if concurrency > 0: + client.put_function_concurrency( + FunctionName=func_name, ReservedConcurrentExecutions=concurrency + ) + -def update_function(cfg, path_to_zip_file, *use_s3, **s3_file): +def update_function( + cfg, + path_to_zip_file, + existing_cfg, + use_s3=False, + s3_file=None, + preserve_vpc=False, +): """Updates the code of an existing Lambda function""" - print('Updating your Lambda function') + print("Updating your Lambda function") byte_stream = read(path_to_zip_file, binary_file=True) - aws_access_key_id = cfg.get('aws_access_key_id') - aws_secret_access_key = cfg.get('aws_secret_access_key') + profile_name = cfg.get("profile") + aws_access_key_id = cfg.get("aws_access_key_id") + aws_secret_access_key = cfg.get("aws_secret_access_key") account_id = get_account_id( - aws_access_key_id, aws_secret_access_key, cfg.get('region'), + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region",), ) role = get_role_name( - cfg.get('region'), account_id, - cfg.get('role', 'lambda_basic_execution'), + cfg.get("region"), + account_id, + cfg.get("role", "lambda_basic_execution"), ) client = get_client( - 'lambda', aws_access_key_id, aws_secret_access_key, - cfg.get('region'), + "lambda", + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region"), ) # Do we prefer development variable over config? 
- buck_name = ( - os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name') - ) + buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") if use_s3: client.update_function_code( - FunctionName=cfg.get('function_name'), - S3Bucket='{}'.format(buck_name), - S3Key='{}'.format(s3_file), + FunctionName=cfg.get("function_name"), + S3Bucket="{}".format(buck_name), + S3Key="{}".format(s3_file), Publish=True, ) else: client.update_function_code( - FunctionName=cfg.get('function_name'), + FunctionName=cfg.get("function_name"), ZipFile=byte_stream, Publish=True, ) + # Wait for function to be updated + waiter = client.get_waiter('function_updated') + waiter.wait(FunctionName=cfg.get("function_name")) + kwargs = { - 'FunctionName': cfg.get('function_name'), - 'Role': role, - 'Runtime': cfg.get('runtime'), - 'Handler': cfg.get('handler'), - 'Description': cfg.get('description'), - 'Timeout': cfg.get('timeout', 15), - 'MemorySize': cfg.get('memory_size', 512), - 'VpcConfig': { - 'SubnetIds': cfg.get('subnet_ids', []), - 'SecurityGroupIds': cfg.get('security_group_ids', []), - }, + "FunctionName": cfg.get("function_name"), + "Role": role, + "Runtime": cfg.get("runtime"), + "Handler": cfg.get("handler"), + "Description": cfg.get("description", ""), + "Timeout": cfg.get("timeout", 15), + "MemorySize": cfg.get("memory_size", 512), } - if 'environment_variables' in cfg: + if preserve_vpc: + kwargs["VpcConfig"] = existing_cfg.get("Configuration", {}).get( + "VpcConfig" + ) + if kwargs["VpcConfig"] is None: + kwargs["VpcConfig"] = { + "SubnetIds": cfg.get("subnet_ids", []), + "SecurityGroupIds": cfg.get("security_group_ids", []), + } + else: + del kwargs["VpcConfig"]["VpcId"] + else: + kwargs["VpcConfig"] = { + "SubnetIds": cfg.get("subnet_ids", []), + "SecurityGroupIds": cfg.get("security_group_ids", []), + } + + if "environment_variables" in cfg: kwargs.update( Environment={ - 'Variables': { + "Variables": { key: str(get_environment_variable_value(value)) - for key, 
value - in cfg.get('environment_variables').items() + for key, value in cfg.get("environment_variables").items() }, }, ) - client.update_function_configuration(**kwargs) + ret = client.update_function_configuration(**kwargs) + + concurrency = get_concurrency(cfg) + if concurrency > 0: + client.put_function_concurrency( + FunctionName=cfg.get("function_name"), + ReservedConcurrentExecutions=concurrency, + ) + elif "Concurrency" in existing_cfg: + client.delete_function_concurrency( + FunctionName=cfg.get("function_name") + ) + + if "tags" in cfg: + tags = {key: str(value) for key, value in cfg.get("tags").items()} + if tags != existing_cfg.get("Tags"): + if existing_cfg.get("Tags"): + client.untag_resource( + Resource=ret["FunctionArn"], + TagKeys=list(existing_cfg["Tags"].keys()), + ) + client.tag_resource(Resource=ret["FunctionArn"], Tags=tags) def upload_s3(cfg, path_to_zip_file, *use_s3): """Upload a function to AWS S3.""" - print('Uploading your new Lambda function') - aws_access_key_id = cfg.get('aws_access_key_id') - aws_secret_access_key = cfg.get('aws_secret_access_key') + print("Uploading your new Lambda function") + profile_name = cfg.get("profile") + aws_access_key_id = cfg.get("aws_access_key_id") + aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( - 's3', aws_access_key_id, aws_secret_access_key, - cfg.get('region'), + "s3", + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region"), ) - byte_stream = b'' - with open(path_to_zip_file, mode='rb') as fh: + byte_stream = b"" + with open(path_to_zip_file, mode="rb") as fh: byte_stream = fh.read() - s3_key_prefix = cfg.get('s3_key_prefix', '/dist') - checksum = hashlib.new('md5', byte_stream).hexdigest() + s3_key_prefix = cfg.get("s3_key_prefix", "/dist") + checksum = hashlib.new("md5", byte_stream).hexdigest() timestamp = str(time.time()) - filename = '{prefix}{checksum}-{ts}.zip'.format( + filename = "{prefix}{checksum}-{ts}.zip".format( 
prefix=s3_key_prefix, checksum=checksum, ts=timestamp, ) # Do we prefer development variable over config? - buck_name = ( - os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name') - ) - func_name = ( - os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name') + buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name") + func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get( + "function_name" ) kwargs = { - 'Bucket': '{}'.format(buck_name), - 'Key': '{}'.format(filename), - 'Body': byte_stream, + "Bucket": "{}".format(buck_name), + "Key": "{}".format(filename), + "Body": byte_stream, } client.put_object(**kwargs) - print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name)) + print("Finished uploading {} to S3 bucket {}".format(func_name, buck_name)) if use_s3: return filename -def function_exists(cfg, function_name): - """Check whether a function exists or not""" +def get_function_config(cfg): + """Check whether a function exists or not and return its config""" - aws_access_key_id = cfg.get('aws_access_key_id') - aws_secret_access_key = cfg.get('aws_secret_access_key') + function_name = cfg.get("function_name") + profile_name = cfg.get("profile") + aws_access_key_id = cfg.get("aws_access_key_id") + aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( - 'lambda', aws_access_key_id, aws_secret_access_key, - cfg.get('region'), + "lambda", + profile_name, + aws_access_key_id, + aws_secret_access_key, + cfg.get("region"), ) - # Need to loop through until we get all of the lambda functions returned. - # It appears to be only returning 50 functions at a time. 
- functions = [] - functions_resp = client.list_functions() - functions.extend([ - f['FunctionName'] for f in functions_resp.get('Functions', []) - ]) - while('NextMarker' in functions_resp): - functions_resp = client.list_functions( - Marker=functions_resp.get('NextMarker'), - ) - functions.extend([ - f['FunctionName'] for f in functions_resp.get('Functions', []) - ]) - return function_name in functions + try: + return client.get_function(FunctionName=function_name) + except client.exceptions.ResourceNotFoundException as e: + if "Function not found" in str(e): + return False + + +def get_concurrency(cfg): + """Return the Reserved Concurrent Executions if present in the config""" + concurrency = int(cfg.get("concurrency", 0)) + return max(0, concurrency) + + +def read_cfg(path_to_config_file, profile_name): + cfg = read(path_to_config_file, loader=yaml.full_load) + if profile_name is not None: + cfg["profile"] = profile_name + elif "AWS_PROFILE" in os.environ: + cfg["profile"] = os.environ["AWS_PROFILE"] + return cfg diff --git a/aws_lambda/helpers.py b/aws_lambda/helpers.py index ed3ef70f..edfd8e9d 100644 --- a/aws_lambda/helpers.py +++ b/aws_lambda/helpers.py @@ -2,6 +2,7 @@ import datetime as dt import os import re +import time import zipfile @@ -11,7 +12,7 @@ def mkdir(path): def read(path, loader=None, binary_file=False): - open_mode = 'rb' if binary_file else 'r' + open_mode = "rb" if binary_file else "r" with open(path, mode=open_mode) as fh: if not loader: return fh.read() @@ -20,7 +21,7 @@ def read(path, loader=None, binary_file=False): def archive(src, dest, filename): output = os.path.join(dest, filename) - zfh = zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) + zfh = zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED) for root, _, files in os.walk(src): for file in files: @@ -29,7 +30,7 @@ def archive(src, dest, filename): return os.path.join(dest, filename) -def timestamp(fmt='%Y-%m-%d-%H%M%S'): +def timestamp(fmt="%Y-%m-%d-%H%M%S"): now = 
dt.datetime.utcnow() return now.strftime(fmt) @@ -37,7 +38,32 @@ def timestamp(fmt='%Y-%m-%d-%H%M%S'): def get_environment_variable_value(val): env_val = val if val is not None and isinstance(val, str): - match = re.search(r'^\${(?P<environment_key_name>\w+)*}$', val) + match = re.search(r"^\${(?P<environment_key_name>\w+)*}$", val) if match is not None: - env_val = os.environ.get(match.group('environment_key_name')) + env_val = os.environ.get(match.group("environment_key_name")) return env_val + + +class LambdaContext: + def current_milli_time(x): + return int(round(time.time() * 1000)) + + def get_remaining_time_in_millis(self): + return max( + 0, + self.timeout_millis + - (self.current_milli_time() - self.start_time_millis), + ) + + def __init__(self, function_name, timeoutSeconds=3): + self.function_name = function_name + self.function_version = None + self.invoked_function_arn = None + self.memory_limit_in_mb = None + self.aws_request_id = None + self.log_group_name = None + self.log_stream_name = None + self.identity = None + self.client_context = None + self.timeout_millis = timeoutSeconds * 1000 + self.start_time_millis = self.current_milli_time() diff --git a/aws_lambda/project_templates/config.yaml b/aws_lambda/project_templates/config.yaml index 72bfdab4..bc293717 100644 --- a/aws_lambda/project_templates/config.yaml +++ b/aws_lambda/project_templates/config.yaml @@ -19,6 +19,7 @@ aws_secret_access_key: # dist_directory: dist # timeout: 15 # memory_size: 512 +# concurrency: 500 # # Experimental Environment variables @@ -26,6 +27,13 @@ environment_variables: env_1: foo env_2: baz +# If `tags` is uncommented then tags will be set at creation or update +# time. During an update all other tags will be removed except the tags +# listed here. +#tags: +# tag_1: foo +# tag_2: bar + # Build options build: source_directories: lib # a comma delimited list of directories in your project root that contains source to package. 
diff --git a/aws_lambda/project_templates/service.py b/aws_lambda/project_templates/service.py index e5bcb681..f04dba34 100644 --- a/aws_lambda/project_templates/service.py +++ b/aws_lambda/project_templates/service.py @@ -3,6 +3,6 @@ def handler(event, context): # Your code goes here! - e = event.get('e') - pi = event.get('pi') + e = event.get("e") + pi = event.get("pi") return e + pi diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3edd8ee4..00000000 --- a/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -boto3==1.4.4 -botocore==1.5.62 -click==6.6 -docutils==0.12 -futures==3.0.5 -jmespath==0.9.0 -pyaml==15.8.2 -python-dateutil==2.5.3 -PyYAML==3.11 -six==1.10.0 diff --git a/scripts/lambda b/scripts/lambda index 3f3f7ae8..08c5eef8 100755 --- a/scripts/lambda +++ b/scripts/lambda @@ -9,7 +9,7 @@ import aws_lambda CURRENT_DIR = os.getcwd() -logging.getLogger('pip').setLevel(logging.CRITICAL) +logging.getLogger("pip").setLevel(logging.CRITICAL) @click.group() @@ -17,16 +17,15 @@ def cli(): pass -@click.command(help='Create a new function for Lambda.') +@click.command(help="Create a new function for Lambda.") @click.option( - '--minimal', + "--minimal", default=False, is_flag=True, - help='Exclude any unnecessary template files', + help="Exclude any unnecessary template files", ) @click.argument( - 'folder', nargs=-1, - type=click.Path(file_okay=False, writable=True), + "folder", nargs=-1, type=click.Path(file_okay=False, writable=True), ) def init(folder, minimal): path = CURRENT_DIR @@ -37,146 +36,173 @@ def init(folder, minimal): aws_lambda.init(path, minimal=minimal) -@click.command(help='Bundles package for deployment.') +@click.command(help="Bundles package for deployment.") @click.option( - '--config-file', - default='config.yaml', - help='Alternate config file.', + "--config-file", default="config.yaml", help="Alternate config file.", ) @click.option( - '--use-requirements', - default=False, - is_flag=True, - help='Install all 
packages defined in requirements.txt', + "--profile", help="AWS profile to use.", +) +@click.option( + "--requirements", + default=None, + type=click.Path(), + help="Install packages from supplied requirements file.", ) @click.option( - '--local-package', + "--local-package", default=None, type=click.Path(), - help='Install local package as well.', + help="Install local package as well.", multiple=True, ) -def build(use_requirements, local_package, config_file): +def build(requirements, local_package, config_file, profile): aws_lambda.build( CURRENT_DIR, - use_requirements=use_requirements, + requirements=requirements, local_package=local_package, config_file=config_file, + profile_name=profile, ) -@click.command(help='Run a local test of your function.') +@click.command(help="Run a local test of your function.") @click.option( - '--event-file', - default='event.json', - help='Alternate event file.', + "--event-file", default="event.json", help="Alternate event file.", ) @click.option( - '--config-file', - default='config.yaml', - help='Alternate config file.', + "--config-file", default="config.yaml", help="Alternate config file.", ) -@click.option('--verbose', '-v', is_flag=True) -def invoke(event_file, config_file, verbose): +@click.option( + "--profile", help="AWS profile to use.", +) +@click.option("--verbose", "-v", is_flag=True) +def invoke(event_file, config_file, profile, verbose): aws_lambda.invoke( CURRENT_DIR, event_file=event_file, config_file=config_file, + profile_name=profile, verbose=verbose, ) -@click.command(help='Register and deploy your code to lambda.') +@click.command(help="Register and deploy your code to lambda.") @click.option( - '--config-file', - default='config.yaml', - help='Alternate config file.', + "--config-file", default="config.yaml", help="Alternate config file.", ) @click.option( - '--use-requirements', - default=False, - is_flag=True, - help='Install all packages defined in requirements.txt', + "--profile", help="AWS profile 
to use.", ) @click.option( - '--local-package', + "--requirements", default=None, type=click.Path(), - help='Install local package as well.', + help="Install all packages defined in supplied requirements file", +) +@click.option( + "--local-package", + default=None, + type=click.Path(), + help="Install local package as well.", multiple=True, ) -def deploy(use_requirements, local_package, config_file): +@click.option( + "--preserve-vpc", + default=False, + is_flag=True, + help="Preserve VPC configuration on existing functions", +) +def deploy(requirements, local_package, config_file, profile, preserve_vpc): aws_lambda.deploy( CURRENT_DIR, - config_file=config_file, - use_requirements=use_requirements, + requirements=requirements, local_package=local_package, + config_file=config_file, + profile_name=profile, + preserve_vpc=preserve_vpc, ) -@click.command(help='Upload your lambda to S3.') +@click.command(help="Upload your lambda to S3.") @click.option( - '--use-requirements', - default=False, - is_flag=True, - help='Install all packages defined in requirements.txt', + "--config-file", default="config.yaml", help="Alternate config file.", +) +@click.option( + "--profile", help="AWS profile to use.", ) @click.option( - '--local-package', + "--requirements", default=None, type=click.Path(), - help='Install local package as well.', + help="Install all packages defined in supplied requirements file", +) +@click.option( + "--local-package", + default=None, + type=click.Path(), + help="Install local package as well.", multiple=True, ) -def upload(use_requirements, local_package): - aws_lambda.upload(CURRENT_DIR, use_requirements, local_package) +def upload(requirements, local_package, config_file, profile): + aws_lambda.upload( + CURRENT_DIR, + requirements=requirements, + local_package=local_package, + config_file=config_file, + profile_name=profile, + ) -@click.command(help='Deploy your lambda via S3.') +@click.command(help="Deploy your lambda via S3.") @click.option( - 
'--config-file', - default='config.yaml', - help='Alternate config file.', + "--config-file", default="config.yaml", help="Alternate config file.", ) @click.option( - '--use-requirements', - default=False, - is_flag=True, - help='Install all packages defined in requirements.txt', + "--profile", help="AWS profile to use.", +) +@click.option( + "--requirements", + default=None, + type=click.Path(), + help="Install all packages defined in supplied requirements file", ) @click.option( - '--local-package', + "--local-package", default=None, type=click.Path(), multiple=True, - help='Install local package as well.', + help="Install local package as well.", ) -def deploy_s3(use_requirements, local_package, config_file): +def deploy_s3(requirements, local_package, config_file, profile): aws_lambda.deploy_s3( - CURRENT_DIR, config_file=config_file, - use_requirements=use_requirements, + CURRENT_DIR, + requirements=requirements, local_package=local_package, + config_file=config_file, + profile_name=profile, ) -@click.command(help='Delete old versions of your functions') +@click.command(help="Delete old versions of your functions") +@click.option( + "--config-file", default="config.yaml", help="Alternate config file.", +) @click.option( - '--config-file', - default='config.yaml', - help='Alternate config file.', + "--profile", help="AWS profile to use.", ) @click.option( - '--keep-last', + "--keep-last", type=int, - prompt='Please enter the number of recent versions to keep', + prompt="Please enter the number of recent versions to keep", ) -def cleanup(keep_last, config_file): +def cleanup(keep_last, config_file, profile): aws_lambda.cleanup_old_versions( - CURRENT_DIR, keep_last, config_file=config_file, + CURRENT_DIR, keep_last, config_file=config_file, profile_name=profile, ) -if __name__ == '__main__': +if __name__ == "__main__": cli.add_command(init) cli.add_command(invoke) cli.add_command(deploy) diff --git a/setup.cfg b/setup.cfg index 11b136a0..2d16abea 100644 --- 
a/setup.cfg +++ b/setup.cfg @@ -1,17 +1,20 @@ [bumpversion] commit = True tag = True -current_version = 3.0.3 +current_version = 11.8.0 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+))? serialize = {major}.{minor}.{patch} +[metadata] +description-file = README.md + [bumpversion:file:setup.py] [bumpversion:file:aws_lambda/__init__.py] -[bdist_wheel] -universal = 1 +[coverage:run] +source = aws_lambda [flake8] exclude = docs diff --git a/setup.py b/setup.py old mode 100755 new mode 100644 index e5ef6457..bce3297e --- a/setup.py +++ b/setup.py @@ -1,62 +1,89 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +"""This module contains setup instructions for python-lambda.""" +import codecs +import os import sys +from shutil import rmtree -import pip +from setuptools import Command from setuptools import find_packages from setuptools import setup -with open('README.rst') as readme_file: - readme = readme_file.read() +REQUIREMENTS = [ + "boto3>=1.4.4", + "click>=6.6", + "PyYAML==5.1", +] +PACKAGE_DATA = { + "aws_lambda": ["project_templates/*"], + "": ["*.json"], +} +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) +README = os.path.join(THIS_DIR, "README.md") -requirements = pip.req.parse_requirements( - 'requirements.txt', session=pip.download.PipSession(), -) +with codecs.open(README, encoding="utf-8") as fh: + long_description = "\n" + fh.read() -# Only install futures package if using a Python version <= 2.7 -if sys.version_info[0] == 2: - pip_requirements = [str(r.req) for r in requirements] -else: - pip_requirements = [str(r.req) - for r in requirements if 'futures' not in str(r.req)] -test_requirements = [ - # TODO: put package test requirements here -] +class UploadCommand(Command): + """Support setup.py publish.""" + + description = "Build and publish the package." 
+ user_options = [] + + @staticmethod + def status(s): + """Print in bold.""" + print(f"\033[1m{s}\033[0m") + + def initialize_options(self): + """Initialize options.""" + pass + + def finalize_options(self): + """Finalize options.""" + pass + + def run(self): + """Upload release to Pypi.""" + try: + self.status("Removing previous builds ...") + rmtree(os.path.join(THIS_DIR, "dist")) + except Exception: + pass + self.status("Building Source distribution ...") + os.system(f"{sys.executable} setup.py sdist") + self.status("Uploading the package to PyPI via Twine ...") + os.system("twine upload dist/*") + sys.exit() + setup( - name='python-lambda', - version='3.0.3', - description='The bare minimum for a Python app running on Amazon Lambda.', - long_description=readme, - author='Nick Ficano', - author_email='nficano@gmail.com', - url='https://github.com/nficano/python-lambda', + name="python-lambda", + version="11.8.0", + author="Nick Ficano", + author_email="nficano@gmail.com", packages=find_packages(), - package_data={ - 'aws_lambda': ['project_templates/*'], - '': ['*.json'], - }, - include_package_data=True, - scripts=['scripts/lambda'], - install_requires=pip_requirements, - license='ISCL', - zip_safe=False, - keywords='python-lambda', + url="https://github.com/nficano/python-lambda", + license="ISCL", + install_requires=REQUIREMENTS, + package_data=PACKAGE_DATA, + test_suite="tests", + tests_require=[], classifiers=[ - 'Development Status :: 2 - Pre-Alpha', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: ISC License (ISCL)', - 'Natural Language :: English', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', + "Development Status :: 2 - Pre-Alpha", + "Intended 
Audience :: Developers", + "License :: OSI Approved :: ISC License (ISCL)", + "Natural Language :: English", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", ], - test_suite='tests', - tests_require=test_requirements, + description="The bare minimum for a Python app running on Amazon Lambda.", + include_package_data=True, + long_description_content_type="text/markdown", + long_description=long_description, + zip_safe=True, + cmdclass={"upload": UploadCommand}, + scripts=["scripts/lambda"], ) diff --git a/tests/__init__.py b/tests/__init__.py old mode 100755 new mode 100644 diff --git a/tests/dev_requirements.txt b/tests/dev_requirements.txt index af92d6d2..0886536b 100644 --- a/tests/dev_requirements.txt +++ b/tests/dev_requirements.txt @@ -1,2 +1,5 @@ bumpversion==0.5.3 -pre-commit==0.15.0 +pre-commit==2.6.0 +pytest>=3.6 +pytest-cov +flake8 diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/test_LambdaContext.py b/tests/unit/test_LambdaContext.py new file mode 100644 index 00000000..16c66303 --- /dev/null +++ b/tests/unit/test_LambdaContext.py @@ -0,0 +1,15 @@ +import time +import unittest + +from aws_lambda.helpers import LambdaContext + + +class TestLambdaContext(unittest.TestCase): + def test_get_remaining_time_in_millis(self): + context = LambdaContext("function_name", 2000) + time.sleep(0.5) + self.assertTrue(context.get_remaining_time_in_millis() < 2000000) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_readHelper.py b/tests/unit/test_readHelper.py new file mode 100644 index 00000000..33c27529 --- /dev/null +++ b/tests/unit/test_readHelper.py @@ -0,0 +1,36 @@ +import os +import unittest + +import yaml + +from 
aws_lambda.helpers import read + + +class TestReadHelper(unittest.TestCase): + + TEST_FILE = "readTmp.txt" + + def setUp(self): + with open(TestReadHelper.TEST_FILE, "w") as tmp_file: + tmp_file.write("testYaml: testing") + + def tearDown(self): + os.remove(TestReadHelper.TEST_FILE) + + def test_read_no_loader_non_binary(self): + fileContents = read(TestReadHelper.TEST_FILE) + self.assertEqual(fileContents, "testYaml: testing") + + def test_read_yaml_loader_non_binary(self): + testYaml = read(TestReadHelper.TEST_FILE, loader=yaml.full_load) + self.assertEqual(testYaml["testYaml"], "testing") + + def test_read_no_loader_binary_mode(self): + fileContents = read(TestReadHelper.TEST_FILE, binary_file=True) + self.assertEqual(fileContents, b"testYaml: testing") + + def test_read_yaml_loader_binary_mode(self): + testYaml = read( + TestReadHelper.TEST_FILE, loader=yaml.full_load, binary_file=True + ) + self.assertEqual(testYaml["testYaml"], "testing")