diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml new file mode 100644 index 0000000..ecda8a3 --- /dev/null +++ b/.github/workflows/base.yml @@ -0,0 +1,173 @@ +# .github/workflows/base.yml +name: Build +on: + # this one is to trigger the workflow manually from the interface + workflow_dispatch: + + push: + tags: + - '*' + branches: + - main + pull_request: + branches: + - main +jobs: + # pre-job to read nox tests matrix - see https://stackoverflow.com/q/66747359/7262247 + list_nox_test_sessions: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v1 + with: + python-version: 3.7 + architecture: x64 + + - name: Install noxfile requirements + shell: bash -l {0} + run: pip install -r noxfile-requirements.txt + + - name: List 'tests' nox sessions + id: set-matrix + run: echo "::set-output name=matrix::$(nox -s gha_list -- tests)" + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} # save nox sessions list to outputs + + run_all_tests: + needs: list_nox_test_sessions + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest ] # , macos-latest, windows-latest] + # all nox sessions: manually > dynamically from previous job + # nox_session: ["tests-2.7", "tests-3.7"] + nox_session: ${{ fromJson(needs.list_nox_test_sessions.outputs.matrix) }} + + name: ${{ matrix.os }} ${{ matrix.nox_session }} # ${{ matrix.name_suffix }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + + # Conda install + - name: Install conda v3.7 + uses: conda-incubator/setup-miniconda@v2 + with: + # auto-update-conda: true + python-version: 3.7 + activate-environment: noxenv + - run: conda info + shell: bash -l {0} # so that conda works + - run: conda list + shell: bash -l {0} # so that conda works + + # Nox install + run + - name: Install noxfile requirements + shell: bash -l {0} # so that conda works + run: pip install -r noxfile-requirements.txt + - run: conda list + shell: bash -l {0} # so that conda works 
+ - run: nox -s "${{ matrix.nox_session }}" + shell: bash -l {0} # so that conda works + + # Share ./docs/reports so that they can be deployed with doc in next job + - name: Share reports with other jobs + # if: matrix.nox_session == '...': not needed, if empty wont be shared + uses: actions/upload-artifact@master + with: + name: reports_dir + path: ./docs/reports + + publish_release: + needs: run_all_tests + runs-on: ubuntu-latest + if: github.event_name == 'push' + steps: + - name: GitHub context to debug conditional steps + env: + GITHUB_CONTEXT: ${{ toJSON(github) }} + run: echo "$GITHUB_CONTEXT" + + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # so that gh-deploy works + + # 1) retrieve the reports generated previously + - name: Retrieve reports + uses: actions/download-artifact@master + with: + name: reports_dir + path: ./docs/reports + + # Conda install + - name: Install conda v3.7 + uses: conda-incubator/setup-miniconda@v2 + with: + # auto-update-conda: true + python-version: 3.7 + activate-environment: noxenv + - run: conda info + shell: bash -l {0} # so that conda works + - run: conda list + shell: bash -l {0} # so that conda works + + # Nox install + - name: Install noxfile requirements + shell: bash -l {0} # so that conda works + run: pip install -r noxfile-requirements.txt + - run: conda list + shell: bash -l {0} # so that conda works + + # 5) Run the flake8 report and badge + - name: Run flake8 analysis and generate corresponding badge + shell: bash -l {0} # so that conda works + run: nox -s flake8 + + # -------------- only on Ubuntu + MAIN PUSH (no pull request, no tag) ----------- + + # 5) Publish the doc and test reports + - name: \[not on TAG\] Publish documentation, tests and coverage reports + if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') # startsWith(matrix.os,'ubuntu') + shell: bash -l {0} # so that conda works + run: nox -s publish + + # 6) Publish coverage report + - name: \[not on TAG\] Create codecov.yaml 
with correct paths + if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') + shell: bash + run: | + cat << EOF > codecov.yml + # codecov.yml + fixes: + - "/home/runner/work/smarie/python-pyfields/::" # Correct paths + EOF + - name: \[not on TAG\] Publish coverage report + if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') + uses: codecov/codecov-action@v1 + with: + files: ./docs/reports/coverage/coverage.xml + + # -------------- only on Ubuntu + TAG PUSH (no pull request) ----------- + + # 7) Create github release and build the wheel + - name: \[TAG only\] Build wheel and create github release + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + shell: bash -l {0} # so that conda works + run: nox -s release -- ${{ secrets.GITHUB_TOKEN }} + + # 8) Publish the wheel on PyPi + - name: \[TAG only\] Deploy on PyPi + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + + delete-artifacts: + needs: publish_release + runs-on: ubuntu-latest + if: github.event_name == 'push' + steps: + - uses: kolpav/purge-artifacts-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + expire-in: 0 # Setting this to 0 will delete all artifacts diff --git a/.gitignore b/.gitignore index 8ca0c44..38d391a 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,8 @@ parts/ sdist/ var/ wheels/ +pip-wheel-metadata/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg @@ -38,14 +40,17 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +pyfields/_version.py # Translations *.mo @@ -55,6 +60,7 @@ coverage.xml *.log local_settings.py db.sqlite3 +db.sqlite3-journal # Flask stuff: instance/ @@ -72,11 +78,26 @@ target/ # Jupyter Notebook .ipynb_checkpoints +# IPython 
+profile_default/ +ipython_config.py + # pyenv .python-version -# celery beat schedule file +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py @@ -85,7 +106,7 @@ celerybeat-schedule .env .venv env/ -venv/ +venv*/ ENV/ env.bak/ venv.bak/ @@ -102,19 +123,20 @@ venv.bak/ # mypy .mypy_cache/ +.dmypy.json +dmypy.json -# Pycharm -.idea/ +# Pyre type checker +.pyre/ -# Mkdocs -site/ +# PyCharm development +/.idea -# travis CI -github_travis_rsa* -reports +# OSX +.DS_Store -# version -_version.py +# JUnit and coverage reports +docs/reports -# pip wheel -pip-wheel-metadata +# ODSClient cache +.odsclient diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 241479e..0000000 --- a/.travis.yml +++ /dev/null @@ -1,131 +0,0 @@ -language: python - -cache: pip - -matrix: - fast_finish: true - include: - - python: 2.7 - env: TYPE_CHECKER="none" - - python: 2.7 - env: TYPE_CHECKER="pytypes" - - python: 3.5.3 - env: TYPE_CHECKER="none" - - python: 3.5.3 - env: TYPE_CHECKER="pytypes" - - python: 3.5.3 - env: TYPE_CHECKER="typeguard" - - python: 3.5 - env: TYPE_CHECKER="none" - - python: 3.5 - env: TYPE_CHECKER="pytypes" - - python: 3.5 - env: TYPE_CHECKER="typeguard" - - python: 3.6 - env: TYPE_CHECKER="none" - - python: 3.6 - env: TYPE_CHECKER="pytypes" - - python: 3.6 - env: TYPE_CHECKER="typeguard" - - python: 3.7 - env: TYPE_CHECKER="none" - dist: xenial - sudo: true -# - python: 3.7 -# env: TYPE_CHECKER="pytypes" -# dist: xenial -# sudo: true - - python: 3.7 - env: TYPE_CHECKER="typeguard" - dist: xenial - sudo: 
true - -env: - global: - - GH_REF: git@github.com:smarie/python-pyfields.git - -before_install: - # (a) linux dependencies - - sudo apt-get install ant - - sudo apt-get install ant-optional # for junitreports - -install: - - pip list - - pip install six setuptools_scm # apparently python 2 requires this - - python ci_tools/py_install.py pip ci_tools/requirements-pip.txt - # this does not work anymore on python 2 so lets only do it when needed - - if [ "${TRAVIS_PYTHON_VERSION}" = "3.5" ]; then pip install mkdocs-material mkdocs; fi; - # travis-specific installs - - pip install PyGithub # for ci_tools/github_release.py - - pip install codecov # See https://github.com/codecov/example-python. - - pip list - -script: -# - coverage run tests.py - - pip install . - - python -c "import os; os.chdir('..'); import pyfields" -# ***tests*** -# - coverage run tests.py -# - pytest --junitxml=reports/junit/junit.xml --html=reports/junit/report.html --cov-report term-missing --cov=./pyfields -v pyfields/tests/ -# now done in a dedicated script to capture exit code 1 and transform it to 0 - - chmod a+x ./ci_tools/run_tests.sh - - if [ ! "${TYPE_CHECKER}" = "none" ]; then pip install "${TYPE_CHECKER}"; fi; - - sh ./ci_tools/run_tests.sh - - python ci_tools/generate-junit-badge.py 100 # generates the badge for the test results and fail build if less than x% - -after_success: -# ***reporting*** -# - junit2html junit.xml testrun.html output is really not nice - - ant -f ci_tools/generate-junit-html.xml # generates the html for the test results. 
Actually we dont use it anymore - - codecov - # - pylint pyfields # note that at the moment the report is simply lost, we dont transform the result into anything -# ***documentation*** - - mkdocs build -f docs/mkdocs.yml - - mv reports/junit docs/ # not in site/ anymore so that we do not need to use --dirty (it breaks client-side search) - # mkdocs gh-deploy requires special care : - # ---grant the possibility to push on the repo--- - - openssl aes-256-cbc -K $encrypted_f0bdab8dce0d_key -iv $encrypted_f0bdab8dce0d_iv -in ci_tools/github_travis_rsa.enc -out ci_tools/github_travis_rsa -d - # If the output file does not exist, that is because the secret is invalid. This can happen in forked repos so do not fail the build - - | - if [ -s "ci_tools/github_travis_rsa" ]; then - chmod 600 ci_tools/github_travis_rsa - eval `ssh-agent -s` # launch the authentication agent - ssh-add ci_tools/github_travis_rsa # register the decrypted key - git config user.name "Automatic Publish" - git config user.email "sylvain.marie@schneider-electric.com" - git remote add gh-remote "${GH_REF}"; - git fetch gh-remote && git fetch gh-remote gh-pages:gh-pages; # make sure we have the latest gh-remote - # push but only if this is not a build triggered by a pull request - # note: do not use the --dirty flag as it breaks client-side search - if [ "${TRAVIS_PULL_REQUEST}" = "false" ] && [ "${TRAVIS_PYTHON_VERSION}" = "3.5" ]; then echo "Pushing to github"; PYTHONPATH=pyfields/ mkdocs gh-deploy -v -f docs/mkdocs.yml --remote-name gh-remote; git push gh-remote gh-pages; fi; - else - echo "File 'ci_tools/github_travis_rsa' has not been created, please check your encrypted repo token in .travis.yml, on the line starting with 'openssl aes-256-cbc...'" - fi - # -- create the _version.py file - # - python ci_tools/write_version.py ./pyfields - -deploy: - # Deploy on PyPI on tags - - provider: pypi - user: "smarie" - password: - secure: 
"bXy76Y5rU56wmkUKOWSUAjEaV/hdXI9xp9scNptQci5MAs5EDbnEDNKKU+aoIocbPBCLLlVekjLu8XKM4BeJEpBtPF7VTiZdEHf6o4q4Kaz3TFzePGKSRrOqcCSJXKynw0ZdlENlOy0GRHtju75cxdYV+TQhTfd5NEI34bCh9lHM9/I0VKVHTSB6GXRmDAW7MoqTChszywfTSflSzcH5AlNBc0KdItZCf4zuTtKmyTFxZ1jDI0DQP4dGwgGKa+/tOW0YSo2FHIwezVUefdQh/+8goAOydaPpMmLCetp5ZavRyOnRp5ELLex0EsgwZ8rq12cCXHfwSmffamMdhRYLW1vthXkxyZOGZkuEaF6pK1E8KxtbtqOMnWEmJ3MMQCxyx8xfyodQ8QnZQrN3TL7v1FaScImofdSDuUNciCzJgIu5NPdAk6c3vMPdWiJ99BBhKbzDRa6jJLAeJn1AVZhiMM6tMBbr3MzHml5vCtMpiDaACyRluzARb5EHWY0j27hyXcJTr2NJ87dbDfgVG5ccZ55zJdJ3sk9+EwsWh+CJaaQlW80JU/y4fBw2lddP48CHKdqu5WetrGm9PAPCg2PuhnuNv0/QIgFG7pAZZd04brLyJwYnFpTr+l0/lxqqvjRU6865NyzUdcQBbzJEbMGHClhhbCgGoGrYGEWJpbLX+Js=" - on: - tags: true - python: 3.5 #only one of the builds have to be deployed - condition: $TYPE_CHECKER = "none" - # server: https://test.pypi.org/legacy/ - distributions: "sdist bdist_wheel" - - # Create a github release on tags - - provider: script - script: python ci_tools/github_release.py -s $GITHUB_TOKEN --repo-slug smarie/python-pyfields -cf ./docs/changelog.md -d https://smarie.github.io/python-pyfields/changelog/ $TRAVIS_TAG - skip_cleanup: true - on: - tags: true - python: 3.5 #only one of the builds have to be deployed - condition: $TYPE_CHECKER = "none" - -notifications: - email: - on_success: never # options: [always|never|change] default: always diff --git a/README.md b/README.md index 44eed4c..d980d39 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ *Define fields in python classes. 
Easily.* -[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://travis-ci.org/smarie/python-pyfields.svg?branch=master)](https://travis-ci.org/smarie/python-pyfields) [![Tests Status](https://smarie.github.io/python-pyfields/junit/junit-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/junit/report.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/master/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) +[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://github.com/smarie/python-pyfields/actions/workflows/base.yml/badge.svg)](https://github.com/smarie/python-pyfields/actions/workflows/base.yml) [![Tests Status](https://smarie.github.io/python-pyfields/reports/junit/junit-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/junit/report.html) [![Coverage Status](https://smarie.github.io/python-pyfields/reports/coverage/coverage-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/coverage/index.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/main/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) [![Flake8 Status](https://smarie.github.io/python-pyfields/reports/flake8/flake8-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/flake8/index.html) [![Documentation](https://img.shields.io/badge/doc-latest-blue.svg)](https://smarie.github.io/python-pyfields/) [![PyPI](https://img.shields.io/pypi/v/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Downloads](https://pepy.tech/badge/pyfields)](https://pepy.tech/project/pyfields) [![Downloads per week](https://pepy.tech/badge/pyfields/week)](https://pepy.tech/project/pyfields) [![GitHub 
stars](https://img.shields.io/github/stars/smarie/python-pyfields.svg)](https://github.com/smarie/python-pyfields/stargazers) @@ -14,54 +14,89 @@ Contributions are welcome ! Simply fork this project on github, commit your cont Here is a non-exhaustive list of interesting open topics: [https://github.com/smarie/python-pyfields/issues](https://github.com/smarie/python-pyfields/issues) -## Requirements for builds +## `nox` setup -Install requirements for setup beforehand using +This project uses `nox` to define all lifecycle tasks. In order to be able to run those tasks, you should create python 3.7 environment and install the requirements: ```bash -pip install -r ci_tools/requirements-pip.txt +>>> conda create -n noxenv python="3.7" +>>> activate noxenv +(noxenv) >>> pip install -r noxfile-requirements.txt ``` -## Running the tests +You should then be able to list all available tasks using: -This project uses `pytest`. +``` +>>> nox --list +Sessions defined in \noxfile.py: + +* tests-2.7 -> Run the test suite, including test reports generation and coverage reports. +* tests-3.5 -> Run the test suite, including test reports generation and coverage reports. +* tests-3.6 -> Run the test suite, including test reports generation and coverage reports. +* tests-3.8 -> Run the test suite, including test reports generation and coverage reports. +* tests-3.7 -> Run the test suite, including test reports generation and coverage reports. +- docs-3.7 -> Generates the doc and serves it on a local http server. Pass '-- build' to build statically instead. +- publish-3.7 -> Deploy the docs+reports on github pages. Note: this rebuilds the docs +- release-3.7 -> Create a release on github corresponding to the latest tag +``` + +## Running the tests and generating the reports + +This project uses `pytest` so running `pytest` at the root folder will execute all tests on current environment. 
However it is a bit cumbersome to manage all requirements by hand ; it is easier to use `nox` to run `pytest` on all supported python environments with the correct package requirements: ```bash -pytest -v pyfields/tests/ +nox ``` +Tests and coverage reports are automatically generated under `./docs/reports` for one of the sessions (`tests-3.7`). -## Packaging +If you wish to execute tests on a specific environment, use explicit session names, e.g. `nox -s tests-3.6`. + + +## Editing the documentation -This project uses `setuptools_scm` to synchronise the version number. Therefore the following command should be used for development snapshots as well as official releases: +This project uses `mkdocs` to generate its documentation page. Therefore building a local copy of the doc page may be done using `mkdocs build -f docs/mkdocs.yml`. However once again things are easier with `nox`. You can easily build and serve locally a version of the documentation site using: ```bash -python setup.py egg_info bdist_wheel rotate -m.whl -k3 +>>> nox -s docs +nox > Running session docs-3.7 +nox > Creating conda env in .nox\docs-3-7 with python=3.7 +nox > [docs] Installing requirements with pip: ['mkdocs-material', 'mkdocs', 'pymdown-extensions', 'pygments'] +nox > python -m pip install mkdocs-material mkdocs pymdown-extensions pygments +nox > mkdocs serve -f ./docs/mkdocs.yml +INFO - Building documentation... +INFO - Cleaning site directory +INFO - The following pages exist in the docs directory, but are not included in the "nav" configuration: + - long_description.md +INFO - Documentation built in 1.07 seconds +INFO - Serving on http://127.0.0.1:8000 +INFO - Start watching changes +... ``` -## Generating the documentation page +While this is running, you can edit the files under `./docs/` and browse the automatically refreshed documentation at the local [http://127.0.0.1:8000](http://127.0.0.1:8000) page. -This project uses `mkdocs` to generate its documentation page. 
Therefore building a local copy of the doc page may be done using: +Once you are done, simply hit `` to stop the session. -```bash -mkdocs build -f docs/mkdocs.yml -``` +Publishing the documentation (including tests and coverage reports) is done automatically by [the continuous integration engine](https://github.com/smarie/python-pyfields/actions), using the `nox -s publish` session, this is not needed for local development. + +## Packaging -## Generating the test reports +This project uses `setuptools_scm` to synchronise the version number. Therefore the following command should be used for development snapshots as well as official releases: `python setup.py sdist bdist_wheel`. However this is not generally needed since [the continuous integration engine](https://github.com/smarie/python-pyfields/actions) does it automatically for us on git tags. For reference, this is done in the `nox -s release` session. -The following commands generate the html test report and the associated badge. +### Merging pull requests with edits - memo + +Ax explained in github ('get commandline instructions'): ```bash -pytest --junitxml=junit.xml -v pyfields/tests/ -ant -f ci_tools/generate-junit-html.xml -python ci_tools/generate-junit-badge.py +git checkout -b - master +git pull https://github.com//python-pyfields.git --no-commit --ff-only ``` -### PyPI Releasing memo - -This project is now automatically deployed to PyPI when a tag is created. Anyway, for manual deployment we can use: +if the second step does not work, do a normal auto-merge (do not use **rebase**!): ```bash -twine upload dist/* -r pypitest -twine upload dist/* +git pull https://github.com//python-pyfields.git --no-commit ``` + +Finally review the changes, possibly perform some modifications, and commit. 
diff --git a/ci_tools/.gitignore b/ci_tools/.gitignore deleted file mode 100644 index 667b860..0000000 --- a/ci_tools/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -# private key -github_travis_rsa* - -# coverage results -.coverage \ No newline at end of file diff --git a/ci_tools/Readme-travis.md b/ci_tools/Readme-travis.md deleted file mode 100644 index e2de2d0..0000000 --- a/ci_tools/Readme-travis.md +++ /dev/null @@ -1,121 +0,0 @@ -This is a reminder on how to grant travis the rights to deploy your site on github pages, python package to pypi and release files to github - -# PREREQUISITE - -## Get access to a linux machine - -The following does not work on windows as explained [here](https://github.com/travis-ci/travis-ci/issues/4746) - -Note: if you get tlsV1 errors below, make sure that you have the latest OpenSSL. If it is not available from you package manager, look at this [compilation sequance](https://github.com/curl/curl/issues/1583#issuecomment-309477196) (replace all versions with the ones you need / the latest). No need to use nghttp2. Dont forget to add `--with-libssh2` to curl compilation step, as mentioned in the [script](https://github.com/dertin/lemp-stack-debian/blob/develop/install.sh)! - -## Install travis commandline - -You have to be outside of the proxy for everything to work correctly, otherwise you will get strange errors mentioning ipaddr... either here or later in the process. - -**1- Install ruby** using RVM : (**DO NOT USE su NOR sudo**) - -```bash -> \curl -sSL https://get.rvm.io | bash -s stable --ruby -> source /home/ubuntu/.rvm/scripts/rvm -> rvm install 2.5.1 (this installs to /home/ubuntu/.rvm/src/ruby...) 
-``` - -*Note: if you already have an old version of rvm you can update it to see the latest ruby versions*: -```bash -> rvm get master -> rvm list known -``` - -*Note: if at some point there is an openssl issue inside ruby it is possible to either make it use the path you like or have it use its own version as explained [here](https://stackoverflow.com/questions/15511943/troubles-with-rvm-and-openssl)*: - -Either -```bash -> rvm install 2.5.1 --with-openssl-dir=/usr/local/ssl or (does not work) /usr/bin/openssl -``` - -or - -```bash -> rvm pkg install openssl -> rvm install 2.5.1 --with-openssl-dir=$HOME/.rvm/usr -``` - -**2- install travis commandline** by following [these instructions](https://github.com/travis-ci/travis.rb#installation): - -```bash -> gem install travis -v 1.8.8 --no-rdoc --no-ri -``` - -source: - * http://railsapps.github.io/installrubyonrails-ubuntu.html - * http://sirupsen.com/get-started-right-with-rvm/ - - -## Optional: setup a shared folder between your development machine and the linux machine - -If possible the shared folder should be the git folder, so that travis automatically detects the git project. - - -# Generating the access keys for travis - -## To deploy a site on gh-pages using `mkdocs gh-deploy` (or for any `git push` operation) - -Generate an asymetric security key (public + private): - - * On windows: open git bash (not windows cmd) - * Execute the following but **DO NOT provide any passphrase when prompted (simply press )** - -```bash -ssh-keygen -t rsa -b 4096 -C "" -f ci_tools/github_travis_rsa -``` - -On the github repository page, `Settings > Deploy Keys > Add deploy key > add` the PUBLIC generated key (the file `ci_tools/github_travis_rsa.pub`) with write access - - -Use travis CLI to encrypt your PRIVATE key: - -```bash -> cd to the shared folder (/media/...) 
-> source /home/ubuntu/.rvm/scripts/rvm -> travis login -> travis encrypt-file -r / ci_tools/github_travis_rsa (DO NOT USE --add option since it will remove all comments in your travis.yml file!) -``` - -Follow the instructions on screen : -- copy the line starting with `openssl ...` to your `travis.yml` file. -- modify the relative path to the generated file by adding 'ci_tools/' in front of 'github_travis_rsa_...enc'. -- git add the generated file 'github_travis_rsa_...enc' but DO NOT ADD the private key - -Note: if you find bug 'no implicit conversion of nil intro String' as mentioned [here](https://github.com/travis-ci/travis.rb/issues/190#issuecomment-377823703), [here](https://github.com/travis-ci/travis.rb/issues/585#issuecomment-374307229) and especially [here](https://github.com/travis-ci/travis.rb/issues/586) it can either be a network proxy error (check that http_proxy is not set...) or a ruby/travis cli version issue. Or worse: an openssl version issue (you check check with wireshark). Best is to reinstall at least the gems: `rvm gemset empty` and then `gem install travis ...` (see above). Note that reinstalling ruby takes a *lot* more time than reinstalling the gems :). - -source: - * https://djw8605.github.io/2017/02/08/deploying-docs-on-github-with-travisci/ (rejecting https://docs.travis-ci.com/user/deployment/pages/ as this would grant full access to travis) - * https://docs.travis-ci.com/user/encrypting-files/ - * https://gist.github.com/domenic/ec8b0fc8ab45f39403dd - -## To deploy python wheels on PyPi - -Similar procedure to encrypt the PyPi password for deployments: - -```bash -> (cd, source, travis login) -> travis encrypt -r / -``` -Copy the resulting string in the `travis.yml` file under deploy > provider: pypi > password > secure - -source: https://docs.travis-ci.com/user/deployment/pypi/ - - -## To deploy file releases on github - -Similar procedure to encrypt the OAuth password for github releases. 
**WARNING** unlike 'travis encrypt', this WILL modify your `travis.yml` file. Therefore you should make a backup of it beforehand, and then execute this command with the '--force' option. - -```bash -> (cd, source, travis login) -> travis login -> travis setup releases -``` - -Copy the string in the `travis.yml` file under deploy > provider: releases > api-key > secure - -source: https://docs.travis-ci.com/user/deployment/releases/ \ No newline at end of file diff --git a/ci_tools/check_python_version.py b/ci_tools/check_python_version.py new file mode 100644 index 0000000..9eaae79 --- /dev/null +++ b/ci_tools/check_python_version.py @@ -0,0 +1,28 @@ +import sys + +if __name__ == "__main__": + # Execute only if run as a script. + # Check the arguments + nbargs = len(sys.argv[1:]) + if nbargs != 1: + raise ValueError("a mandatory argument is required: ") + + expected_version_str = sys.argv[1] + try: + expected_version = tuple(int(i) for i in expected_version_str.split(".")) + except Exception as e: + raise ValueError("Error while parsing expected version %r: %r" % (expected_version, e)) + + if len(expected_version) < 1: + raise ValueError("At least a major is expected") + + if sys.version_info[0] != expected_version[0]: + raise AssertionError("Major version does not match. Expected %r - Actual %r" % (expected_version_str, sys.version)) + + if len(expected_version) >= 2 and sys.version_info[1] != expected_version[1]: + raise AssertionError("Minor version does not match. Expected %r - Actual %r" % (expected_version_str, sys.version)) + + if len(expected_version) >= 3 and sys.version_info[2] != expected_version[2]: + raise AssertionError("Patch version does not match. 
Expected %r - Actual %r" % (expected_version_str, sys.version)) + + print("SUCCESS - Actual python version %r matches expected one %r" % (sys.version, expected_version_str)) diff --git a/ci_tools/flake8-requirements.txt b/ci_tools/flake8-requirements.txt new file mode 100644 index 0000000..37d58b6 --- /dev/null +++ b/ci_tools/flake8-requirements.txt @@ -0,0 +1,16 @@ +setuptools_scm>=3,<4 +flake8>=3.6,<4 +flake8-html>=0.4,<1 +flake8-bandit>=2.1.1,<3 +bandit<1.7.3 # To revert later +flake8-bugbear>=20.1.0,<21.0.0 +flake8-docstrings>=1.5,<2 +flake8-print>=3.1.1,<4 +flake8-tidy-imports>=4.2.1,<5 +flake8-copyright==0.2.2 # Internal forked repo to fix an issue, keep specific version +pydocstyle>=5.1.1,<6 +pycodestyle>=2.6.0,<3 +mccabe>=0.6.1,<1 +naming>=0.5.1,<1 +pyflakes>=2.2,<3 +genbadge[flake8] diff --git a/ci_tools/generate-junit-badge.py b/ci_tools/generate-junit-badge.py deleted file mode 100644 index f33f9ea..0000000 --- a/ci_tools/generate-junit-badge.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys -try: - # python 3 - from urllib.parse import quote_plus -except ImportError: - # python 2 - from urllib import quote_plus - -import requests -import shutil -from os import makedirs, path -import xunitparser - - -class TestStats(object): - def __init__(self, success_percentage, success, runned, skipped, errors): - self.success_percentage = success_percentage - self.success = success - self.runned = runned - self.skipped = skipped - self.errors = errors - - -def get_test_stats(junit_xml='reports/junit/junit.xml' # type: str - ): - # type: (...) 
-> TestStats - """ - read the junit test file and extract the success percentage - :param junit_xml: the junit xml file path - :return: the success percentage (an int) - """ - ts, tr = xunitparser.parse(open(junit_xml)) - skipped = len(tr.skipped) - runned = tr.testsRun - skipped - failed = len(tr.failures) - errors = len(tr.errors) - success = runned - failed - - success_percentage = round(success * 100 / (runned + errors)) - - return TestStats(success_percentage, success, runned, skipped, errors) - - -def download_badge(test_stats, # type: TestStats - dest_folder='reports/junit' # type: str - ): - """ - Downloads the badge corresponding to the provided success percentage, from https://img.shields.io. - - :param test_stats: - :param dest_folder: - :return: - """ - if not path.exists(dest_folder): - makedirs(dest_folder) # , exist_ok=True) not python 2 compliant - - if test_stats.success_percentage < 50: - color = 'red' - elif test_stats.success_percentage < 75: - color = 'orange' - elif test_stats.success_percentage < 90: - color = 'green' - else: - color = 'brightgreen' - - left_txt = "tests" - # right_txt = "%s%%" % test_stats.success_percentage - right_txt = "%s/%s" % (test_stats.success, (test_stats.runned + test_stats.errors)) - url = 'https://img.shields.io/badge/%s-%s-%s.svg' % (left_txt, quote_plus(right_txt), color) - - dest_file = path.join(dest_folder, 'junit-badge.svg') - - print('Generating junit badge from : ' + url) - response = requests.get(url, stream=True) - with open(dest_file, 'wb') as out_file: - response.raw.decode_content = True - shutil.copyfileobj(response.raw, out_file) - del response - - -if __name__ == "__main__": - # Execute only if run as a script. 
- # Check the arguments - assert len(sys.argv[1:]) == 1, "a single mandatory argument is required: " - threshold = float(sys.argv[1]) - - # First retrieve the success percentage from the junit xml - test_stats = get_test_stats() - - # Validate against the threshold - print("Success percentage is %s%%. Checking that it is >= %s" % (test_stats.success_percentage, threshold)) - if test_stats.success_percentage < threshold: - raise Exception("Success percentage %s%% is strictly lower than required threshold %s%%" - "" % (test_stats.success_percentage, threshold)) - - # Download the badge - download_badge(test_stats) diff --git a/ci_tools/generate-junit-html.xml b/ci_tools/generate-junit-html.xml deleted file mode 100644 index 9f1e7a9..0000000 --- a/ci_tools/generate-junit-html.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git a/ci_tools/github_release.py b/ci_tools/github_release.py index cb5af4e..1738a92 100644 --- a/ci_tools/github_release.py +++ b/ci_tools/github_release.py @@ -1,7 +1,3 @@ -# Authors: Sylvain Marie -# -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. 
- # a clone of the ruby example https://gist.github.com/valeriomazzeo/5491aee76f758f7352e2e6611ce87ec1 import os from os import path diff --git a/ci_tools/github_travis_rsa.enc b/ci_tools/github_travis_rsa.enc deleted file mode 100644 index ef22605..0000000 Binary files a/ci_tools/github_travis_rsa.enc and /dev/null differ diff --git a/ci_tools/nox_utils.py b/ci_tools/nox_utils.py new file mode 100644 index 0000000..4cb8d7d --- /dev/null +++ b/ci_tools/nox_utils.py @@ -0,0 +1,799 @@ +from itertools import product + +import asyncio +from collections import namedtuple +from inspect import signature, isfunction +import logging +from pathlib import Path +import shutil +import subprocess +import sys +import os + +from typing import Sequence, Dict, Union, Iterable, Mapping, Any, IO, Tuple, Optional, List + +from makefun import wraps, remove_signature_parameters, add_signature_parameters + +import nox +from nox.sessions import Session + + +nox_logger = logging.getLogger("nox") + + +PY27, PY35, PY36, PY37, PY38, PY39, PY310 = "2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10" +DONT_INSTALL = "dont_install" + + +def power_session( + func=None, + envs=None, + grid_param_name="env", + python=None, + py=None, + reuse_venv=None, + name=None, + venv_backend=None, + venv_params=None, + logsdir=None, + **kwargs +): + """A nox.session on steroids + + :param func: + :param envs: a dictionary {key: dict_of_params} where key is either the python version of a tuple (python version, + grid id) and all keys in the dict_of_params must be the same in all entries. The decorated function should + have one parameter for each of these keys, they will be injected with the value. + :param grid_param_name: when the key in `envs` is a tuple, this name will be the name of the generated parameter to + iterate through the various combinations for each python version. 
+ :param python: + :param py: + :param reuse_venv: + :param name: + :param venv_backend: + :param venv_params: + :param logsdir: + :param kwargs: + :return: + """ + if func is not None: + return power_session()(func) + else: + def combined_decorator(f): + # replace Session with PowerSession + f = with_power_session(f) + + # open a log file for the session, use it to stream the commands stdout and stderrs, + # and possibly inject the log file in the session function + if logsdir is not None: + f = with_logfile(logs_dir=logsdir)(f) + + # decorate with @nox.session and possibly @nox.parametrize to create the grid + return nox_session_with_grid(python=python, py=py, envs=envs, reuse_venv=reuse_venv, name=name, + grid_param_name=grid_param_name, venv_backend=venv_backend, + venv_params=venv_params, **kwargs)(f) + + return combined_decorator + + +def with_power_session(f=None): + """ A decorator to patch the session objects in order to add all methods from Session2""" + + if f is not None: + return with_power_session()(f) + + def _decorator(f): + @wraps(f) + def _f_wrapper(**kwargs): + # patch the session arg + PowerSession.patch(kwargs['session']) + + # finally execute the session + return f(**kwargs) + + return _f_wrapper + + return _decorator + + +class PowerSession(Session): + """ + Our nox session improvements + """ + + # ------------ commandline runners ----------- + + def run2(self, + command: Union[Iterable[str], str], + logfile: Union[bool, str, Path] = True, + **kwargs): + """ + An improvement of session.run that is able to + + - automatically split the provided command if it is a string + - use a log file + + :param command: + :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. 
+ :param kwargs: + :return: + """ + if isinstance(command, str): + command = command.split(' ') + + self.run(*command, logfile=logfile, **kwargs) + + def run_multi(self, + cmds: str, + logfile: Union[bool, str, Path] = True, + **kwargs): + """ + An improvement of session.run that is able to + + - support multiline strings + - use a log file + + :param cmds: + :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. + :param kwargs: + :return: + """ + for cmdline in (line for line in cmds.splitlines() if line): + self.run2(cmdline, logfile=logfile, **kwargs) + + # ------------ requirements installers ----------- + + def install_reqs( + self, + # pre wired phases + setup=False, + install=False, + tests=False, + extras=(), + # custom phase + phase=None, + phase_reqs=None, + versions_dct=None + ): + """ + A high-level helper to install requirements from the various project files + + - pyproject.toml "[build-system] requires" (if setup=True) + - setup.cfg "[options] setup_requires" (if setup=True) + - setup.cfg "[options] install_requires" (if install=True) + - setup.cfg "[options] test_requires" (if tests=True) + - setup.cfg "[options.extras_require] <...>" (if extras=(a tuple of extras)) + + Two additional mechanisms are provided in order to customize how packages are installed. + + Conda packages + -------------- + If the session runs on a conda environment, you can add a [tool.conda] section to your pyproject.toml. This + section should contain a `conda_packages` entry containing the list of package names that should be installed + using conda instead of pip. 
+ + ``` + [tool.conda] + # Declare that the following packages should be installed with conda instead of pip + # Note: this includes packages declared everywhere, here and in setup.cfg + conda_packages = [ + "setuptools", + "wheel", + "pip" + ] + ``` + + Version constraints + ------------------- + In addition to the version constraints in the pyproject.toml and setup.cfg, you can specify additional temporary + constraints with the `versions_dct` argument , for example if you know that this executes on a specific python + version that requires special care. + For this, simply pass a dictionary of {'pkg_name': 'pkg_constraint'} for example {"pip": ">10"}. + + """ + + # Read requirements from pyproject.toml + toml_setup_reqs, toml_use_conda_for = read_pyproject_toml() + if setup: + self.install_any("pyproject.toml#build-system", toml_setup_reqs, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + # Read test requirements from setup.cfg + setup_cfg = read_setuptools_cfg() + if setup: + self.install_any("setup.cfg#setup_requires", setup_cfg.setup_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + if install: + self.install_any("setup.cfg#install_requires", setup_cfg.install_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + if tests: + self.install_any("setup.cfg#tests_requires", setup_cfg.tests_requires, + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + for extra in extras: + self.install_any("setup.cfg#extras_require#%s" % extra, setup_cfg.extras_require[extra], + use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + if phase is not None: + self.install_any(phase, phase_reqs, use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + + def install_any(self, + phase_name: str, + pkgs: Sequence[str], + use_conda_for: Sequence[str] = (), + versions_dct: Dict[str, str] = None, + logfile: Union[bool, str, Path] = True, + ): + """Install the `pkgs` provided with 
`session.install(*pkgs)`, except for those present in `use_conda_for`""" + + nox_logger.debug("\nAbout to install *%s* requirements: %s.\n " + "Conda pkgs are %s" % (phase_name, pkgs, use_conda_for)) + + # use the provided versions dictionary to update the versions + if versions_dct is None: + versions_dct = dict() + pkgs = [pkg + versions_dct.get(pkg, "") for pkg in pkgs if versions_dct.get(pkg, "") != DONT_INSTALL] + + # install on conda... if the session uses conda backend + if not isinstance(self.virtualenv, nox.virtualenv.CondaEnv): + conda_pkgs = [] + else: + conda_pkgs = [pkg_req for pkg_req in pkgs if any(get_req_pkg_name(pkg_req) == c for c in use_conda_for)] + if len(conda_pkgs) > 0: + nox_logger.info("[%s] Installing requirements with conda: %s" % (phase_name, conda_pkgs)) + self.conda_install2(*conda_pkgs, logfile=logfile) + + pip_pkgs = [pkg_req for pkg_req in pkgs if pkg_req not in conda_pkgs] + # safety: make sure that nothing went modified or forgotten + assert set(conda_pkgs).union(set(pip_pkgs)) == set(pkgs) + if len(pip_pkgs) > 0: + nox_logger.info("[%s] Installing requirements with pip: %s" % (phase_name, pip_pkgs)) + self.install2(*pip_pkgs, logfile=logfile) + + def conda_install2(self, + *conda_pkgs, + logfile: Union[bool, str, Path] = True, + **kwargs + ): + """ + Same as session.conda_install() but with support for `logfile`. + + :param conda_pkgs: + :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. + :return: + """ + return self.conda_install(*conda_pkgs, logfile=logfile, **kwargs) + + def install2(self, + *pip_pkgs, + logfile: Union[bool, str, Path] = True, + **kwargs + ): + """ + Same as session.install() but with support for `logfile`. + + :param pip_pkgs: + :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. 
+ :return: + """ + return self.install(*pip_pkgs, logfile=logfile, **kwargs) + + def get_session_id(self): + """Return the session id""" + return Path(self.bin).name + + @classmethod + def is_power_session(cls, session: Session): + return PowerSession.install2.__name__ in session.__dict__ + + @classmethod + def patch(cls, session: Session): + """ + Add all methods from this class to the provided object. + Note that we could instead have created a proper proxy... but complex for not a lot of benefit. + :param session: + :return: + """ + if not cls.is_power_session(session): + for m_name, m in cls.__dict__.items(): + if not isfunction(m): + continue + if m is cls.patch: + continue + if not hasattr(session, m_name): + setattr(session.__class__, m_name, m) + + return True + + +# ------------- requirements related + + +def read_pyproject_toml(): + """ + Reads the `pyproject.toml` and returns + + - a list of setup requirements from [build-system] requires + - sub-list of these requirements that should be installed with conda, from [tool.my_conda] conda_packages + """ + if os.path.exists("pyproject.toml"): + import toml + nox_logger.debug("\nA `pyproject.toml` file exists. Loading it.") + pyproject = toml.load("pyproject.toml") + requires = pyproject['build-system']['requires'] + conda_pkgs = pyproject['tool']['conda']['conda_packages'] + return requires, conda_pkgs + else: + raise FileNotFoundError("No `pyproject.toml` file exists. 
No dependency will be installed ...") + + +SetupCfg = namedtuple('SetupCfg', ('setup_requires', 'install_requires', 'tests_requires', 'extras_require')) + + +def read_setuptools_cfg(): + """ + Reads the `setup.cfg` file and extracts the various requirements lists + """ + # see https://stackoverflow.com/a/30679041/7262247 + from setuptools import Distribution + dist = Distribution() + dist.parse_config_files() + return SetupCfg(setup_requires=dist.setup_requires, + install_requires=dist.install_requires, + tests_requires=dist.tests_require, + extras_require=dist.extras_require) + + +def get_req_pkg_name(r): + """Return the package name part of a python package requirement. + + For example + "funcsigs;python<'3.5'" will return "funcsigs" + "pytest>=3" will return "pytest" + """ + return r.replace('<', '=').replace('>', '=').replace(';', '=').split("=")[0] + + +# ------------- log related + + +def with_logfile(logs_dir: Path, + logfile_arg: str = "logfile", + logfile_handler_arg: str = "logfilehandler" + ): + """ A decorator to inject a logfile""" + + def _decorator(f): + # check the signature of f + foo_sig = signature(f) + needs_logfile_injection = logfile_arg in foo_sig.parameters + needs_logfilehandler_injection = logfile_handler_arg in foo_sig.parameters + + # modify the exposed signature if needed + new_sig = None + if needs_logfile_injection: + new_sig = remove_signature_parameters(foo_sig, logfile_arg) + if needs_logfilehandler_injection: + new_sig = remove_signature_parameters(foo_sig, logfile_handler_arg) + + @wraps(f, new_sig=new_sig) + def _f_wrapper(**kwargs): + # find the session arg + session = kwargs['session'] # type: Session + + # add file handler to logger + logfile = logs_dir / ("%s.log" % PowerSession.get_session_id(session)) + error_logfile = logfile.with_name("ERROR_%s" % logfile.name) + success_logfile = logfile.with_name("SUCCESS_%s" % logfile.name) + # delete old files if present + for _f in (logfile, error_logfile, success_logfile): + if 
_f.exists(): + _f.unlink() + + # add a FileHandler to the logger + logfile_handler = log_to_file(logfile) + + # inject the log file / log file handler in the args: + if needs_logfile_injection: + kwargs[logfile_arg] = logfile + if needs_logfilehandler_injection: + kwargs[logfile_handler_arg] = logfile_handler + + # finally execute the session + try: + res = f(**kwargs) + except Exception as e: + # close and detach the file logger and rename as ERROR_....log + remove_file_logger() + logfile.rename(error_logfile) + raise e + else: + # close and detach the file logger and rename as SUCCESS_....log + remove_file_logger() + logfile.rename(success_logfile) + return res + + return _f_wrapper + + return _decorator + + +def log_to_file(file_path: Union[str, Path] + ): + """ + Closes and removes all file handlers from the nox logger, + and add a new one to the provided file path + + :param file_path: + :return: + """ + for h in list(nox_logger.handlers): + if isinstance(h, logging.FileHandler): + h.close() + nox_logger.removeHandler(h) + fh = logging.FileHandler(str(file_path), mode='w') + nox_logger.addHandler(fh) + return fh + + +def get_current_logfile_handler(): + """ + Returns the current unique log file handler (see `log_to_file`) + """ + for h in list(nox_logger.handlers): + if isinstance(h, logging.FileHandler): + return h + return None + + +def get_log_file_stream(): + """ + Returns the output stream for the current log file handler if any (see `log_to_file`) + """ + h = get_current_logfile_handler() + if h is not None: + return h.stream + return None + + +def remove_file_logger(): + """ + Closes and detaches the current logfile handler + :return: + """ + h = get_current_logfile_handler() + if h is not None: + h.close() + nox_logger.removeHandler(h) + + +# ------------ environment grid / parametrization related + +def nox_session_with_grid(python = None, + py = None, + envs: Mapping[str, Mapping[str, Any]] = None, + reuse_venv: Optional[bool] = None, + name: 
Optional[str] = None, + venv_backend: Any = None, + venv_params: Any = None, + grid_param_name: str = None, + **kwargs + ): + """ + Since nox is not yet capable to define a build matrix with python and parameters mixed in the same parametrize + this implements it with a dirty hack. + To remove when https://github.com/theacodes/nox/pull/404 is complete + + :param envs: + :param env_python_key: + :return: + """ + if envs is None: + # Fast track default to @nox.session + return nox.session(python=python, py=py, reuse_venv=reuse_venv, name=name, venv_backend=venv_backend, + venv_params=venv_params, **kwargs) + else: + # Current limitation : session param names can be 'python' or 'py' only + if py is not None or python is not None: + raise ValueError("`python` session argument can not be provided both directly and through the " + "`env` with `session_param_names`") + + # First examine the env and collect the parameter values for python + all_python = [] + all_params = [] + + env_contents_names = None + has_parameter = None + for env_id, env_params in envs.items(): + # consistency checks for the env_id + if has_parameter is None: + has_parameter = isinstance(env_id, tuple) + else: + if has_parameter != isinstance(env_id, tuple): + raise ValueError("All keys in env should be tuples, or not be tuples. Error for %r" % env_id) + + # retrieve python version and parameter + if not has_parameter: + if env_id not in all_python: + all_python.append(env_id) + else: + if len(env_id) != 2: + raise ValueError("Only a size-2 tuple can be used as env id") + py_id, param_id = env_id + if py_id not in all_python: + all_python.append(py_id) + if param_id not in all_params: + all_params.append(param_id) + + # consistency checks for the dict contents. 
+ if env_contents_names is None: + env_contents_names = set(env_params.keys()) + else: + if env_contents_names != set(env_params.keys()): + raise ValueError("Environment %r parameters %r does not match parameters in the first environment: %r" + % (env_id, env_contents_names, set(env_params.keys()))) + + if has_parameter and not grid_param_name: + raise ValueError("You must provide a grid parameter name when the env keys are tuples.") + + def _decorator(f): + s_name = name if name is not None else f.__name__ + for pyv, _param in product(all_python, all_params): + if (pyv, _param) not in envs: + # create a dummy folder to avoid creating a useless venv ? + env_dir = Path(".nox") / ("%s-%s-%s-%s" % (s_name, pyv.replace('.', '-'), grid_param_name, _param)) + env_dir.mkdir(parents=True, exist_ok=True) + + # check the signature of f + foo_sig = signature(f) + missing = env_contents_names - set(foo_sig.parameters) + if len(missing) > 0: + raise ValueError("Session function %r does not contain environment parameter(s) %r" % (f.__name__, missing)) + + # modify the exposed signature if needed + new_sig = None + if len(env_contents_names) > 0: + new_sig = remove_signature_parameters(foo_sig, *env_contents_names) + + if has_parameter: + if grid_param_name in foo_sig.parameters: + raise ValueError("Internal error, this parameter has a reserved name: %r" % grid_param_name) + else: + new_sig = add_signature_parameters(new_sig, last=(grid_param_name,)) + + @wraps(f, new_sig=new_sig) + def _f_wrapper(**kwargs): + # find the session arg + session = kwargs['session'] # type: Session + + # get the versions to use for this environment + try: + if has_parameter: + grid_param = kwargs.pop(grid_param_name) + params_dct = envs[(session.python, grid_param)] + else: + params_dct = envs[session.python] + except KeyError: + # Skip this session, it is a dummy one + nox_logger.warning( + "Skipping configuration, this is not supported in python version %r" % session.python) + return + + # inject 
the parameters in the args: + kwargs.update(params_dct) + + # finally execute the session + return f(**kwargs) + + if has_parameter: + _f_wrapper = nox.parametrize(grid_param_name, all_params)(_f_wrapper) + + _f_wrapper = nox.session(python=all_python, reuse_venv=reuse_venv, name=name, + venv_backend=venv_backend, venv_params=venv_params)(_f_wrapper) + return _f_wrapper + + return _decorator + + +# ----------- other goodies + + +def rm_file(folder: Union[str, Path] + ): + """Since on windows Path.unlink throws permission error sometimes, os.remove is preferred.""" + if isinstance(folder, str): + folder = Path(folder) + + if folder.exists(): + os.remove(str(folder)) + # Folders.site.unlink() --> possible PermissionError + + +def rm_folder(folder: Union[str, Path] + ): + """Since on windows Path.unlink throws permission error sometimes, shutil is preferred.""" + if isinstance(folder, str): + folder = Path(folder) + + if folder.exists(): + shutil.rmtree(str(folder)) + # Folders.site.unlink() --> possible PermissionError + + +# --- the patch of popen able to tee to logfile -- + + +import nox.popen as nox_popen_module +orig_nox_popen = nox_popen_module.popen + + +class LogFileStreamCtx: + def __init__(self, logfile_stream): + self.logfile_stream = logfile_stream + + def __enter__(self): + return self.logfile_stream + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + +def patched_popen( + args: Sequence[str], + env: Mapping[str, str] = None, + silent: bool = False, + stdout: Union[int, IO] = None, + stderr: Union[int, IO] = subprocess.STDOUT, + logfile: Union[bool, str, Path] = None, + **kwargs +) -> Tuple[int, str]: + """ + Our patch of nox.popen.popen(). 
+ + Current behaviour in `nox` is + + - when `silent=True` (default), process err is redirected to STDOUT and process out is captured in a PIPE and sent + to the logger (that does not displaying it :) ) + + - when `silent=False` (explicitly set, or when nox is run with verbose flag), process out and process err are both + redirected to STDOUT. + + Our implementation allows us to be a little more flexible: + + - if logfile is True or a string/Path, both process err and process out are both TEE-ed to logfile + - at the same time, the above behaviour remains. + + :param args: + :param env: + :param silent: + :param stdout: + :param stderr: + :param logfile: None/False (normal nox behaviour), or True (using nox file handler), or a file path. + :return: + """ + logfile_stream = get_log_file_stream() + + if logfile in (None, False) or (logfile is True and logfile_stream is None): + # execute popen as usual + return orig_nox_popen(args=args, env=env, silent=silent, stdout=stdout, stderr=stderr, **kwargs) + + else: + # we'll need to tee the popen + if logfile is True: + ctx = LogFileStreamCtx + else: + ctx = lambda _: open(logfile, "a") + + with ctx(logfile_stream) as log_file_stream: + if silent and stdout is not None: + raise ValueError( + "Can not specify silent and stdout; passing a custom stdout always silences the commands output in " + "Nox's log." + ) + + shell = kwargs.get("shell", False) + if shell: + raise ValueError("Using shell=True is not yet supported with async streaming to log files") + + if stdout is not None or stderr is not subprocess.STDOUT: + raise ValueError("Using custom streams is not yet supported with async popen") + + # old way + # proc = subprocess.Popen(args, env=env, stdout=stdout, stderr=stderr) + + # New way: use asyncio to stream correctly + # Note: if keyboard interrupts do not work we should check + # https://mail.python.org/pipermail/async-sig/2017-August/000374.html maybe or the following threads. 
+ + # define the async coroutines + async def async_popen(): + process = await asyncio.create_subprocess_exec(*args, env=env, stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, **kwargs) + + # bind the out and err streams - see https://stackoverflow.com/a/59041913/7262247 + # to mimic nox behaviour we only use a single capturing list + outlines = [] + await asyncio.wait([ + # process out is only redirected to STDOUT if not silent + _read_stream(process.stdout, lambda l: tee(l, sinklist=outlines, sinkstream=log_file_stream, + quiet=silent, verbosepipe=sys.stdout)), + # process err is always redirected to STDOUT (quiet=False) with a specific label + _read_stream(process.stderr, lambda l: tee(l, sinklist=outlines, sinkstream=log_file_stream, + quiet=False, verbosepipe=sys.stdout, label="ERR:")) + ]) + return_code = await process.wait() # make sur the process has ended and retrieve its return code + return return_code, outlines + + # run the coroutine in the event loop + loop = asyncio.get_event_loop() + return_code, outlines = loop.run_until_complete(async_popen()) + + # just in case, flush everything + log_file_stream.flush() + sys.stdout.flush() + sys.stderr.flush() + + if silent: + # same behaviour as in nox: this will be passed to the logger, and it will act depending on verbose flag + out = "\n".join(outlines) if len(outlines) > 0 else "" + else: + # already written to stdout, no need to capture + out = "" + + return return_code, out + + +async def _read_stream(stream, callback): + """Helper async coroutine to read from a stream line by line and write them in callback""" + while True: + line = await stream.readline() + if line: + callback(line) + else: + break + + +def tee(linebytes, sinklist, sinkstream, verbosepipe, quiet, label=""): + """ + Helper routine to read a line, decode it, and append it to several sinks: + + - an optional `sinklist` list that will receive the decoded string in its "append" method + - an optional `sinkstream` stream that 
will receive the decoded string in its "writelines" method + - an optional `verbosepipe` stream that will receive only when quiet=False, the decoded string through a print + + append it to the sink, and if quiet=False, write it to pipe too. + """ + line = linebytes.decode('utf-8').rstrip() + + if sinklist is not None: + sinklist.append(line) + + if sinkstream is not None: + sinkstream.write(line + "\n") + sinkstream.flush() + + if not quiet and verbosepipe is not None: + print(label, line, file=verbosepipe) + verbosepipe.flush() + + +def patch_popen(): + nox_popen_module.popen = patched_popen + + from nox.command import popen + if popen is not patched_popen: + nox.command.popen = patched_popen + + # change event loop on windows + # see https://stackoverflow.com/a/44639711/7262247 + # and https://docs.python.org/3/library/asyncio-platforms.html#subprocess-support-on-windows + if 'win32' in sys.platform: + # Windows specific event-loop policy & cmd + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) + # cmds = [['C:/Windows/system32/HOSTNAME.EXE']] + + # loop = asyncio.ProactorEventLoop() + # asyncio.set_event_loop(loop) + + +patch_popen() diff --git a/ci_tools/py_install.py b/ci_tools/py_install.py deleted file mode 100644 index 7d664ba..0000000 --- a/ci_tools/py_install.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -equivalent of pip install -r - - with environment variables replacement - - and all dependencies are installed in one 'pip install' call (solving potential complex deps) -""" -import os - -import re -import sys -import subprocess - - -def check_cmd(cmd): - assert isinstance(cmd, str), "cmd should be a string" - assert cmd in {"pip", "conda"}, "cmd should be conda or pip. 
Unknown: " + str(cmd) - - -def install(cmd, packages): - """ - Installs all packages provided at once - :param packages: - :return: - """ - check_cmd(cmd) - - all_pkgs_str = " ".join(all_pkgs) - print("INSTALLING: " + cmd + " install " + all_pkgs_str) - subprocess.check_call([cmd, 'install'] + packages) # install pkg - - -env_var_regexp = re.compile(".*\$(\S+).*") - - -if __name__ == '__main__': - assert len(sys.argv[1:]) >= 2, "at least two mandatory arguments are required: " - - cmd = sys.argv[1] - check_cmd(cmd) - - filenames = sys.argv[2:] - - all_pkgs = [] - for filename in filenames: - with open(filename) as f: - for line in f.readlines(): - # First remove any comment on that line - splitted = line.split('#', 1) # (maxsplit=1) but python 2 does not support it :) - splitted = splitted[0].strip().rstrip() - if splitted != '': - # the replace env vars - env_var_found=True - while env_var_found: - res = env_var_regexp.match(splitted) - env_var_found = res is not None - if env_var_found: - env_var_name = res.groups()[0] - try: - env_var_val = os.environ[env_var_name] - print("replacing $%s with %s" % (env_var_name, env_var_val)) - splitted = splitted.replace("$%s" % env_var_name, env_var_val) - except KeyError: - raise Exception("Environment variable does not exist in file %s: $%s" - "" % (filename, env_var_name)) - else: - all_pkgs.append(splitted) - - install(cmd, all_pkgs) diff --git a/ci_tools/requirements-pip.txt b/ci_tools/requirements-pip.txt deleted file mode 100644 index 59d31dd..0000000 --- a/ci_tools/requirements-pip.txt +++ /dev/null @@ -1,26 +0,0 @@ -# --- to execute setup.py whatever the goal -pytest-runner -setuptools_scm -six - -# --- to install -# sentinel -valid8>=5.0 -makefun -vtypes - -# --- to run the tests -pytest>=4.4 #$PYTEST_VERSION -pytest-logging # ==2015.11.4 -mini-lambda -autoclass - -# --- to generate the reports (see scripts in ci_tools, called by .travis) -# pytest-cov==2.6.0 # after 2.6.1 it requires pytest 3.6 -pytest-html==1.9.0 
# otherwise requires pytest 5 -xunitparser - -# --- to generate the doc (see .travis) -# does not work anymore on python 2 -# mkdocs-material # ==2.7.3 -# mkdocs # ==0.17.3 # this is to prevent a version non-compliant with mkdocs-material to be installed. diff --git a/ci_tools/run_tests.sh b/ci_tools/run_tests.sh deleted file mode 100644 index f198952..0000000 --- a/ci_tools/run_tests.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -cleanup() { - rv=$? - # on exit code 1 this is normal (some tests failed), do not stop the build - if [ "$rv" = "1" ]; then - exit 0 - else - exit $rv - fi -} - -trap "cleanup" INT TERM EXIT - -if [ "${TRAVIS_PYTHON_VERSION}" = "3.5" ]; then - # full - # Run tests with "python -m pytest" to use the correct version of pytest - echo -e "\n\n****** Running tests ******\n\n" - coverage run --source pyfields -m pytest --junitxml=reports/junit/junit.xml --html=reports/junit/report.html -s -v pyfields/ # and not pyfields/tests/: we want DocTest too - # buggy - # python -m pytest --junitxml=reports/junit/junit.xml --html=reports/junit/report.html --cov-report term-missing --cov=./pyfields -v pyfields/tests/ -else - # faster - skip coverage and html report but keep junit (because used in validity threshold) - echo -e "\n\n****** Running tests******\n\n" - python -m pytest --junitxml=reports/junit/junit.xml -s -v pyfields/ # and not pyfields/tests/: we want DocTest too -fi diff --git a/ci_tools/write_version.py b/ci_tools/write_version.py deleted file mode 100644 index bc1b887..0000000 --- a/ci_tools/write_version.py +++ /dev/null @@ -1,20 +0,0 @@ -# Authors: Sylvain Marie -# -# License: BSD 3 clause - -from os.path import abspath - -import click -from setuptools_scm import get_version - - -@click.command() -@click.argument('dest_folder') -def write_version(dest_folder): - file_name = '%s/_version.py' % dest_folder - print("Writing version to file: %s" % abspath(file_name)) - get_version('.', write_to=file_name) - - -if __name__ == 
'__main__': - write_version() diff --git a/docs/api_reference.md b/docs/api_reference.md index dcb785b..f0f299b 100644 --- a/docs/api_reference.md +++ b/docs/api_reference.md @@ -134,8 +134,7 @@ generating default value for )` returns a factory that creates a copy of the provided `val` everytime it is called. Handy if you wish to use mutable - objects as default values for your fields ; for example lists. + - `copy_value(, deep=True, autocheck=True)` returns a factory that creates a copy of the provided `val` everytime it is called. Handy if you wish to use mutable objects as default values for your fields ; for example lists. Not that starting in version 1.7, `copy_value` will automatically check that the (deep) copy operation is feasible, at initial call time. You can disable this by setting `autocheck=False`. - `copy_attr(, deep=True)` returns a factory that creates a (deep or not) copy of the value in the given attribute everytime it is called. @@ -356,10 +355,11 @@ Wall ## `@autofields` ```python -def autofields(check_types=False, # type: bool - include_upper=False, # type: bool - include_dunder=False, # type: bool - make_init=True # type: bool +def autofields(check_types=False, # type: bool + include_upper=False, # type: bool + include_dunder=False, # type: bool + exclude=DEFAULT_EXCLUDED, # type: Iterable[str] + make_init=True # type: bool ): ``` @@ -402,6 +402,86 @@ Traceback (most recent call last): TypeError: __init__() got an unexpected keyword argument 'SENTENCE' ``` +**Parameters** + + - `check_types`: boolean flag (default: `False`) indicating the value of `check_type` for created fields. Note that the type hint of each created field is copied from the type hint of the member it originates from. + + - `include_upper`: boolean flag (default: `False`) indicating whether upper-case class members should be also transformed to fields (usually such names are reserved for class constants, not for fields). 
+ + - `include_dunder`: boolean flag (default: `False`) indicating whether dunder-named class members should be also transformed to fields. Note that even if you set this to True, members with reserved python dunder names will not be transformed. See `is_reserved_dunder` for the list of reserved names. + + - `exclude`: a tuple of field names that should be excluded from automatic creation. By default this is set to `DEFAULT_EXCLUDED`, which eliminates fields created by `ABC`. + + - `make_init`: boolean flag (default: `True`) indicating whether a constructor should be created for the class if no `__init__` method is present. Such constructor will be created using `__init__ = make_init()`. + +## `@autoclass` + +```python +def autoclass( + # --- autofields + fields=True, # type: Union[bool, DecoratedClass] + typecheck=False, # type: bool + # --- constructor + init=True, # type: bool + # --- class methods + dict=True, # type: bool + dict_public_only=True, # type: bool + repr=True, # type: bool + repr_curly_mode=False, # type: bool + repr_public_only=True, # type: bool + eq=True, # type: bool + eq_public_only=False, # type: bool + hash=True, # type: bool + hash_public_only=False, # type: bool + # --- advanced + af_include_upper=False, # type: bool + af_include_dunder=False, # type: bool + af_exclude=DEFAULT_EXCLUDED, # type: Iterable[str] + ac_include=None, # type: Union[str, Tuple[str]] + ac_exclude=None, # type: Union[str, Tuple[str]] + ): +``` + +A decorator to automate many things at once for your class. + +First if `fields=True` (default) it executes `@autofields` to generate fields from attribute defined at class level. 
+ + - you can include attributes with dunder names or uppercase names with `af_include_dunder` and + `af_include_upper` respectively + - you can enable type checking on all fields at once by setting `check_types=True` + - the constructor is not generated at this stage + +Then it generates methods for the class: + + - if `init=True` (default) it generates the constructor based on all fields present, using `make_init()`. + - if `dict=True` (default) it generates `to_dict` and `from_dict` methods. Only public fields are represented in `to_dict`, you can change this with `dict_public_only=False`. + - if `repr=True` (default) it generates a `__repr__` method. Only public fields are represented, you can change this with `repr_public_only=False`. + - if `eq=True` (default) it generates an `__eq__` method, so that instances can be compared to other instances and to dicts. All fields are compared by default, you can change this with `eq_public_only=True`. + - if `hash=True` (default) it generates an `__hash__` method, so that instances can be inserted in sets or dict keys. All fields are hashed by default, you can change this with `hash_public_only=True`. + +You can specify an explicit list of fields to include or exclude in the dict/repr/eq/hash methods with the `ac_include` and `ac_exclude` parameters. + +Note that this decorator is similar to the [autoclass library](https://smarie.github.io/python-autoclass/) but is reimplemented here. In particular the parameter names and dictionary behaviour are different. + +**Parameters** + + - `fields`: boolean flag (default: True) indicating whether to create fields automatically. See `@autofields` for details + - `typecheck`: boolean flag (default: False) used when fields=True indicating the value of `check_type` for created fields. Note that the type hint of each created field is copied from the type hint of the member it originates from. 
+ - `init`: boolean flag (default: True) indicating whether a constructor should be created for the class if no `__init__` method is already present. Such constructor will be created using `__init__ = make_init()`. This is the same behaviour as `make_init` in `@autofields`. Note that this is *not* automatically disabled if you set `fields=False`. + - `dict`: a boolean to automatically create `cls.from_dict(dct)` and `obj.to_dict()` methods on the class (default: True). + - `dict_public_only`: a boolean (default: True) to indicate if only public fields should be exposed in the dictionary view created by `to_dict` when `dict=True`. + - `repr`: a boolean (default: True) to indicate if `__repr__` and `__str__` should be created for the class if not explicitly present. + - `repr_curly_mode`: a boolean (default: False) to turn on an alternate string representation when `repr=True`, using curly braces. + - `repr_public_only`: a boolean (default: True) to indicate if only public fields should be exposed in the string representation when `repr=True`. + - `eq`: a boolean (default: True) to indicate if `__eq__` should be created for the class if not explicitly present. + - `eq_public_only`: a boolean (default: False) to indicate if only public fields should be compared in the equality method created when `eq=True`. + - `hash`: a boolean (default: True) to indicate if `__hash__` should be created for the class if not explicitly present. + - `hash_public_only`: a boolean (default: False) to indicate if only public fields should be hashed in the hash method created when `hash=True`. + - `af_include_upper`: boolean flag (default: False) used when autofields=True indicating whether upper-case class members should be also transformed to fields (usually such names are reserved for class constants, not for fields). + - `af_include_dunder`: boolean flag (default: False) used when autofields=True indicating whether dunder-named class members should be also transformed to fields. 
Note that even if you set this to True, members with reserved python dunder names will not be transformed. See `is_reserved_dunder` for the list of reserved names. + - `af_exclude`: a tuple of explicit attribute names to exclude from automatic fields creation. See `@autofields(exclude=...)` for details. + - `ac_include`: a tuple of explicit attribute names to include in dict/repr/eq/hash (None means all) + - `ac_exclude`: a tuple of explicit attribute names to exclude in dict/repr/eq/hash. In such case, include should be None. ## API @@ -419,17 +499,20 @@ If `include_inherited` is `True` (default), the method will return `True` if at ### `get_fields` ```python -def get_fields(cls, +def get_fields(cls_or_obj, include_inherited=True, # type: bool remove_duplicates=True, # type: bool ancestors_first=True, # type: bool + public_only=False, # type: bool container_type=tuple, # type: Type[T] ) ``` -Utility method to collect all fields defined in a class, including all inherited or not. +Utility method to collect all fields defined in a class, including all inherited or not, in definition order. By default duplicates are removed and ancestor fields are included and appear first. If a field is overridden, it will appear at the position of the overridden field in the order. +If an object is provided, `getfields` will be executed on its class. + ### `yield_fields` ```python @@ -437,6 +520,7 @@ def yield_fields(cls, include_inherited=True, # type: bool remove_duplicates=True, # type: bool ancestors_first=True, # type: bool + public_only=False, # type: bool ) ``` @@ -450,3 +534,21 @@ def get_field(cls, name) Utility method to return the field member with name `name` in class `cls`. If the member is not a field, a `NotAFieldError` is raised. 
+ +### `get_field_values` + +```python +def get_field_values(obj, + include_inherited=True, # type: bool + remove_duplicates=True, # type: bool + ancestors_first=True, # type: bool + public_only=False, # type: bool + container_type=ODict, # type: Type[T] + ) +``` + +Utility method to collect all field names and values defined on an object, including all inherited or not. + +By default duplicates are removed and ancestor fields are included and appear first. If a field is overridden, it will appear at the position of the overridden field in the order. + +The result is an ordered dictionary (a `dict` in python 3.7, an `OrderedDict` otherwise) of {name: value} pairs. One can change the container type with the `container_type` attribute though, that will receive an iterable of (key, value) pairs. diff --git a/docs/changelog.md b/docs/changelog.md index 5bf4311..abd6a47 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,8 +1,66 @@ # Changelog +### 1.7.2 - bugfix + + - Fixed `TypeError: Neither typeguard not pytypes is installed` even with `typeguard` installed. + Fixed [#91](https://github.com/smarie/python-pyfields/issues/91) + +### 1.7.1 - Compatibility fix for typeguard `3.0.0` + + - Fixed `TypeError: check_type() takes 2 positional arguments but 3 were given` triggering erroneous `FieldTypeError` + when `typeguard>=3.0.0` is used. Fixed [#87](https://github.com/smarie/python-pyfields/issues/87) + +### 1.7.0 - Better support for non-deep-copiable default values in `@autofields` + + - `@autofields` and `@autoclass` now raise an error when a field definition can not be valid, because the default value can not be deep-copied. This will help users detect issues such as [#84](https://github.com/smarie/python-pyfields/issues/84) earlier. Implementation is done through a new `autocheck` option in the `copy_value` factory. + + - `@autofields` and `@autoclass` now provide an `exclude` (resp. `af_exclude`) list, to list names for fields that should not be created. 
By default this contains a reserved name from `abc.ABCMeta`, for convenience. Fixes [#84](https://github.com/smarie/python-pyfields/issues/84). + + +### 1.6.2 - CI/CD migration + + - This is a technical release with no code change, to validate the new Github Actions workflow. + +### 1.6.1 - Bugfix + + - Fixed an issue with `autofields` (and therefore `autoclass` too) where a field would be mistakenly recreated on a subclass when that subclass does not define type hints while the parent class defines type hints. Fixes [#81](https://github.com/smarie/python-pyfields/issues/81) + +### 1.6.0 - we now have our own version of `@autoclass` + + - Copied the relevant contents from `autoclass` so as to get rid of the dependency. Since we are in a `pyfields` context there were many things that could be dropped and remaining code could be easily copied over. Also took this opportunity to replace the dict view with a `to_dict`/`from_dict` pair of methods, this seems less intrusive in the class design. Finally the parameter names have been simplified, see [API reference](./api_reference.md#autoclass) for details. Fixes [#79](https://github.com/smarie/python-pyfields/issues/79) + +### 1.5.0 - updated `@autoclass` signature + + - Improved `@autoclass` so that it is much easier to access the relevant arguments from underlying `@autofields` and `@autoclass`. Fixed [#78](https://github.com/smarie/python-pyfields/issues/78) + +### 1.4.0 - new `@autoclass` decorator + + - New `@autoclass` decorator directly available from `pyfields`. It is merely equivalent to the original `@autoclass` with option `autofields=True`, which makes it easier to use on classes with automatic fields. Fixes [#75](https://github.com/smarie/python-pyfields/issues/75) + +### 1.3.2 - bugfix + + - Fields order are preserved by `@autofields` even in the case of an explicit `field()` with all others implicit. 
Fixed [#77](https://github.com/smarie/python-pyfields/issues/77) + +### 1.3.1 - bugfix + + - Field order is preserved by `@autofields` even in the case of a field with just a type annotation. Fixed [#76](https://github.com/smarie/python-pyfields/issues/76) + +### 1.3.0 - Support for Forward references, PEP563 and class-level access + + - String forward references in type hints, and PEP563 behaviour, is now supported. When this case happens, the type hint resolution is delayed until the field is first accessed. Fixes [#73](https://github.com/smarie/python-pyfields/issues/73) + + - Accessing a field definition from a class directly is now enabled, since PyCharm [fixed their autocompletion bug](https://youtrack.jetbrains.com/issue/PY-38151). Fixes [#12](https://github.com/smarie/python-pyfields/issues/12) + + +### 1.2.0 - `getfields` improvements and new `get_field_values` + + - `getfields` can now be executed on an instance, and provides a `public_only` option. Fixes [#69](https://github.com/smarie/python-pyfields/issues/69) + + - New `get_field_values` method to get an ordered dict-like of field name: value. Fixes [#70](https://github.com/smarie/python-pyfields/issues/70) + ### 1.1.5 - bugfix - - `@autofields` now correctly skips `@property` and descriptor members. Fixes [#67](https://github.com/smarie/python-pyfields/issues/67) + - `@autofields` now correctly skips `@property` and more generally, descriptor members. Fixes [#67](https://github.com/smarie/python-pyfields/issues/67) ### 1.1.4 - better python 2 packaging diff --git a/docs/index.md b/docs/index.md index 2297ea3..22fbbeb 100644 --- a/docs/index.md +++ b/docs/index.md @@ -2,12 +2,13 @@ *Define fields in python classes. 
Easily.* -[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://travis-ci.org/smarie/python-pyfields.svg?branch=master)](https://travis-ci.org/smarie/python-pyfields) [![Tests Status](https://smarie.github.io/python-pyfields/junit/junit-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/junit/report.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/master/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) +[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://github.com/smarie/python-pyfields/actions/workflows/base.yml/badge.svg)](https://github.com/smarie/python-pyfields/actions/workflows/base.yml) [![Tests Status](./reports/junit/junit-badge.svg?dummy=8484744)](./reports/junit/report.html) [![Coverage Status](./reports/coverage/coverage-badge.svg?dummy=8484744)](./reports/coverage/index.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/main/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) [![Flake8 Status](./reports/flake8/flake8-badge.svg?dummy=8484744)](./reports/flake8/index.html) [![Documentation](https://img.shields.io/badge/doc-latest-blue.svg)](https://smarie.github.io/python-pyfields/) [![PyPI](https://img.shields.io/pypi/v/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Downloads](https://pepy.tech/badge/pyfields)](https://pepy.tech/project/pyfields) [![Downloads per week](https://pepy.tech/badge/pyfields/week)](https://pepy.tech/project/pyfields) [![GitHub stars](https://img.shields.io/github/stars/smarie/python-pyfields.svg)](https://github.com/smarie/python-pyfields/stargazers) -!!! new `@autofields` feature, [check it out](#a-autofields) ! -!!! success "`pyfields` is now automatically supported by `autoclass` ! See [here](#hash-dict-eq-repr) for details." +!!! 
success "`pyfields` now has its own [`@autoclass`](#b-autoclass) with sensible defaults, to complement the existing [`@autofields`](#a-autofields) feature! No need to import it from `autoclass` anymore." + +!!! success "New [`marshmallow-pyfields`](https://smarie.github.io/python-marshmallow-pyfields/) project brings `pyfields` to the famous [`marshmallow`](https://marshmallow.readthedocs.io/en/stable/) ORM ecosystem!" `pyfields` provides a simple and elegant way to define fields in python classes. With `pyfields` you explicitly define all aspects of a field (default value/factory, type, validators, converters, documentation...) in a single place, and can refer to it from other places. @@ -29,7 +30,9 @@ It provides **many optional features** that will make your object-oriented devel - initializing fields in your *constructor* is very easy and highly customizable -Finally, it offers an API that other libraries can leverage to get the list of fields. For example `autoclass` now leverages `pyfields` to automatically add hash/dict/eq/repr to your class. + - you can automate fields creation with `@autofields` or even automatically add hash/dict/eq/repr to your class based on the fields using `@autoclass`. + +Finally, it offers an API that other libraries can leverage to [get the list of fields](./api_reference.md#api). If your first reaction is "what about `attrs` / `dataclasses` / `pydantic` / `characteristic` / `traits` / `traitlets` / ...", well all of these inspired `pyfields` a great deal, but all of these have stronger constraints on the class - which I did not want. Please have a look [here](why.md) for a complete list of inspirators. @@ -653,7 +656,42 @@ class Pocket: By default type checking is not enabled on the generated fields, but you can enable it with `@autofields(check_types=True)`. You can also disable constructor creation with `@autofields(make_init=False)`. See [API reference](https://smarie.github.io/python-pyfields/api_reference/#api) for details. 
-#### b - `VType`s +#### b - `@autoclass` + +Do you **also** wish to have `hash`, `dict`, `eq`, and `repr` views automatically created for your class ? From version `1.6` on, `pyfields` provides an `@autoclass` decorator. That way, your field definitions can directly be reused for most of the class behaviour. + +```python +from pyfields import field, autoclass + +@autoclass +class Foo: + msg: str + age: int = 12 + height: int = field(default=50) + +foo = Foo(msg='hello') + +print(foo) # automatic string representation +print(foo.to_dict()) # automatic from/to dict +assert foo == Foo.from_dict(foo.to_dict()) + +assert foo == Foo(msg='hello', age=12, height=50) # automatic equality comparison +assert foo == {'msg': 'hello', 'age': 12, 'height': 50} # automatic eq comparison with dicts +``` + +yields + +``` +Foo(msg='hello', age=12, height=50) +{'msg': 'hello', 'age': 12, 'height': 50} +``` + +!!!warning "use pyfields `@autoclass` over the one in `autoclass` library" + This decorator combines `@autofields` and `@autoclass` into one, with options that are relevant to `pyfields`. Therefore it is now the recommended one to use. + +See [API reference](./api_reference.md#autoclass) for details. + +#### c - `VType`s Instead of registering validators in the field, you can now use `vtypes`. That way, everything is in the type: type checking AND value validation. @@ -691,37 +729,6 @@ class Item: `pyfields` offers an API so that other libraries can inspect the fields: `get_fields`, `yield_fields`, `has_fields`, `get_field`. See [API reference](https://smarie.github.io/python-pyfields/api_reference/#api) for details. -#### hash, dict, eq, repr - -`autoclass` is now compliant with `pyfields`. So you can use `@autoclass`, or `@autorepr`, `@autohash`, `@autodict`... on the decorated class. That way, your fields definition is directly reused for most of the class behaviour. 
- -```python -from autoclass import autoclass -from pyfields import field - -@autoclass -class Foo: - msg: str = field() - age: int = field(default=12) - -foo = Foo(msg='hello') - -print(foo) # automatic string representation -print(dict(foo)) # automatic dict view - -assert foo == Foo(msg='hello', age=12) # automatic equality comparison -assert foo == {'msg': 'hello', 'age': 12} # automatic eq comparison with dicts -``` - -yields - -``` -Foo(msg='hello', age=12) -{'msg': 'hello', 'age': 12} -``` - -See [here](https://smarie.github.io/python-autoclass/#pyfields-combo) for details. - #### Slots You can use `pyfields` if your class has `__slots__`. You will simply have to use an underscore in the slot name corresponding to a field: `_`. For example: diff --git a/docs/long_description.md b/docs/long_description.md index 571d322..a86b69e 100644 --- a/docs/long_description.md +++ b/docs/long_description.md @@ -2,7 +2,7 @@ *Define fields in python classes. Easily.* -[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://travis-ci.org/smarie/python-pyfields.svg?branch=master)](https://travis-ci.org/smarie/python-pyfields) [![Tests Status](https://smarie.github.io/python-pyfields/junit/junit-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/junit/report.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/master/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) +[![Python versions](https://img.shields.io/pypi/pyversions/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Build Status](https://github.com/smarie/python-pyfields/actions/workflows/base.yml/badge.svg)](https://github.com/smarie/python-pyfields/actions/workflows/base.yml) [![Tests Status](https://smarie.github.io/python-pyfields/reports/junit/junit-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/junit/report.html) [![Coverage 
Status](https://smarie.github.io/python-pyfields/reports/coverage/coverage-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/coverage/index.html) [![codecov](https://codecov.io/gh/smarie/python-pyfields/branch/main/graph/badge.svg)](https://codecov.io/gh/smarie/python-pyfields) [![Flake8 Status](https://smarie.github.io/python-pyfields/reports/flake8/flake8-badge.svg?dummy=8484744)](https://smarie.github.io/python-pyfields/reports/flake8/index.html) [![Documentation](https://img.shields.io/badge/doc-latest-blue.svg)](https://smarie.github.io/python-pyfields/) [![PyPI](https://img.shields.io/pypi/v/pyfields.svg)](https://pypi.python.org/pypi/pyfields/) [![Downloads](https://pepy.tech/badge/pyfields)](https://pepy.tech/project/pyfields) [![Downloads per week](https://pepy.tech/badge/pyfields/week)](https://pepy.tech/project/pyfields) [![GitHub stars](https://img.shields.io/github/stars/smarie/python-pyfields.svg)](https://github.com/smarie/python-pyfields/stargazers) diff --git a/docs/mkdocs.yml b/mkdocs.yml similarity index 53% rename from docs/mkdocs.yml rename to mkdocs.yml index e336a0f..b6839d4 100644 --- a/docs/mkdocs.yml +++ b/mkdocs.yml @@ -1,8 +1,10 @@ site_name: pyfields # site_description: 'A short description of my project' repo_url: https://github.com/smarie/python-pyfields -docs_dir: . -site_dir: ../site +#docs_dir: . 
+#site_dir: ../site +# default branch is main instead of master now on github +edit_uri : ./edit/main/docs nav: - Home: index.md - Why fields: why.md @@ -12,8 +14,10 @@ nav: theme: material # readthedocs mkdocs markdown_extensions: + - pymdownx.highlight # see https://squidfunk.github.io/mkdocs-material/reference/code-blocks/#highlight + - pymdownx.superfences # same as above as well as code blocks inside other blocks - admonition # to add notes such as http://squidfunk.github.io/mkdocs-material/extensions/admonition/ - - codehilite: - guess_lang: true +# - codehilite: +# guess_lang: false - toc: - permalink: true \ No newline at end of file + permalink: true diff --git a/noxfile-requirements.txt b/noxfile-requirements.txt new file mode 100644 index 0000000..6fb3e1c --- /dev/null +++ b/noxfile-requirements.txt @@ -0,0 +1,5 @@ +nox +toml +makefun +setuptools_scm # used in 'release' +keyring # used in 'release' diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..18ca842 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,305 @@ +from itertools import product +from json import dumps +import logging + +import nox # noqa +from pathlib import Path # noqa +import sys + +# add parent folder to python path so that we can import noxfile_utils.py +# note that you need to "pip install -r noxfile-requiterements.txt" for this file to work. 
+sys.path.append(str(Path(__file__).parent / "ci_tools")) +from nox_utils import PY27, PY37, PY36, PY35, PY38, PY39, PY310, power_session, rm_folder, rm_file, PowerSession # noqa + + +pkg_name = "pyfields" +gh_org = "smarie" +gh_repo = "python-pyfields" + +ENVS = { + # --- python 3.9 - first in list to catch obvious bugs on local executions + (PY39, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + # (PY38, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + (PY39, "typeguard"): {"coverage": False, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}}, + # --- python 3.8 + (PY38, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + # (PY38, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + (PY38, "typeguard"): {"coverage": False, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}}, + # --- python 2.7 + (PY27, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + (PY27, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + # --- python 3.5.3 > requires free channel > hard to make it work on GHA + # ("3.5.3", "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + # ("3.5.3", "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + # ("3.5.3", "typeguard"): {"coverage": False, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}}, + # --- python 3.5 + (PY35, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + (PY35, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + (PY35, "typeguard"): {"coverage": False, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}}, + # --- python 3.6 + (PY36, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": 
{"pip": ">19"}}, + (PY36, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + (PY36, "typeguard"): {"coverage": False, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}}, + # --- python 3.7 + (PY37, "no-typechecker"): {"coverage": False, "type_checker": None, "pkg_specs": {"pip": ">19"}}, + #(PY37, "pytypes"): {"coverage": False, "type_checker": "pytypes", "pkg_specs": {"pip": ">19"}}, + # IMPORTANT: this should be last so that the folder docs/reports is not deleted afterwards + (PY37, "typeguard"): {"coverage": True, "type_checker": "typeguard", "pkg_specs": {"pip": ">19"}} +} + + +# set the default activated sessions, minimal for CI +nox.options.sessions = ["tests", "flake8"] # , "docs", "gh_pages" +nox.options.reuse_existing_virtualenvs = True # this can be done using -r +# if platform.system() == "Windows": >> always use this for better control +nox.options.default_venv_backend = "conda" +# os.environ["NO_COLOR"] = "True" # nox.options.nocolor = True does not work +# nox.options.verbose = True + +nox_logger = logging.getLogger("nox") +# nox_logger.setLevel(logging.INFO) NO !!!! this prevents the "verbose" nox flag to work ! 
+ + +class Folders: + root = Path(__file__).parent + ci_tools = root / "ci_tools" + runlogs = root / Path(nox.options.envdir or ".nox") / "_runlogs" + runlogs.mkdir(parents=True, exist_ok=True) + dist = root / "dist" + site = root / "site" + site_reports = site / "reports" + reports_root = root / "docs" / "reports" + test_reports = reports_root / "junit" + test_xml = test_reports / "junit.xml" + test_html = test_reports / "report.html" + test_badge = test_reports / "junit-badge.svg" + coverage_reports = reports_root / "coverage" + coverage_xml = coverage_reports / "coverage.xml" + coverage_intermediate_file = root / ".coverage" + coverage_badge = coverage_reports / "coverage-badge.svg" + flake8_reports = reports_root / "flake8" + flake8_intermediate_file = root / "flake8stats.txt" + flake8_badge = flake8_reports / "flake8-badge.svg" + + +@power_session(envs=ENVS, logsdir=Folders.runlogs) +def tests(session: PowerSession, coverage, type_checker, pkg_specs): + """Run the test suite, including test reports generation and coverage reports. """ + + # As soon as this runs, we delete the target site and coverage files to avoid reporting wrong coverage/etc. + rm_folder(Folders.site) + rm_folder(Folders.reports_root) + # delete the .coverage files if any (they are not supposed to be any, but just in case) + rm_file(Folders.coverage_intermediate_file) + rm_file(Folders.root / "coverage.xml") + + # CI-only dependencies + # Did we receive a flag through positional arguments ? 
(nox -s tests -- ) + # install_ci_deps = False + # if len(session.posargs) == 1: + # assert session.posargs[0] == "keyrings.alt" + # install_ci_deps = True + # elif len(session.posargs) > 1: + # raise ValueError("Only a single positional argument is accepted, received: %r" % session.posargs) + + # uncomment and edit if you wish to uninstall something without deleting the whole env + # session.run2("pip uninstall pytest-asyncio --yes") + + # install all requirements + session.install_reqs(setup=True, install=True, tests=True, versions_dct=pkg_specs) + + # install optional typechecker + if type_checker is not None: + session.install2(type_checker) + + # install CI-only dependencies + # if install_ci_deps: + # session.install2("keyrings.alt") + + # list all (conda list alone does not work correctly on github actions) + # session.run2("conda list") + conda_prefix = Path(session.bin) + if conda_prefix.name == "bin": + conda_prefix = conda_prefix.parent + session.run2("conda list", env={"CONDA_PREFIX": str(conda_prefix), "CONDA_DEFAULT_ENV": session.get_session_id()}) + + # Fail if the assumed python version is not the actual one + session.run2("python ci_tools/check_python_version.py %s" % session.python) + + # install self so that it is recognized by pytest + session.run2("pip install -e . --no-deps") + # session.install("-e", ".", "--no-deps") + + # check that it can be imported even from a different folder + # Important: do not surround the command into double quotes as in the shell ! 
+ session.run('python', '-c', 'import os; os.chdir(\'./docs/\'); import %s' % pkg_name) + + # finally run all tests + if not coverage: + # simple: pytest only + session.run2("python -m pytest --cache-clear -v %s/tests/" % pkg_name) + else: + # coverage + junit html reports + badge generation + session.install_reqs(phase="coverage", + phase_reqs=["coverage", "pytest-html", "genbadge[tests,coverage]"], + versions_dct=pkg_specs) + + # --coverage + junit html reports + session.run2("coverage run --source {pkg_name} " + "-m pytest --cache-clear --junitxml={test_xml} --html={test_html} -v {pkg_name}/tests/" + "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2("coverage report") + session.run2("coverage xml -o {covxml}".format(covxml=Folders.coverage_xml)) + session.run2("coverage html -d {dst}".format(dst=Folders.coverage_reports)) + # delete this intermediate file, it is not needed anymore + rm_file(Folders.coverage_intermediate_file) + + # --generates the badge for the test results and fail build if less than x% tests pass + nox_logger.info("Generating badge for tests coverage") + # Use our own package to generate the badge + session.run2("genbadge tests -i %s -o %s -t 100" % (Folders.test_xml, Folders.test_badge)) + session.run2("genbadge coverage -i %s -o %s" % (Folders.coverage_xml, Folders.coverage_badge)) + + +@power_session(python=PY38, logsdir=Folders.runlogs) +def flake8(session: PowerSession): + """Launch flake8 qualimetry.""" + + session.install("-r", str(Folders.ci_tools / "flake8-requirements.txt")) + session.install("genbadge[flake8]") + session.run2("pip install -e .[flake8]") + + rm_folder(Folders.flake8_reports) + Folders.flake8_reports.mkdir(parents=True, exist_ok=True) + rm_file(Folders.flake8_intermediate_file) + + # Options are set in `setup.cfg` file + session.run("flake8", pkg_name, "--exit-zero", "--format=html", "--htmldir", str(Folders.flake8_reports), + "--statistics", "--tee", "--output-file", 
str(Folders.flake8_intermediate_file)) + # generate our badge + session.run2("genbadge flake8 -i %s -o %s" % (Folders.flake8_intermediate_file, Folders.flake8_badge)) + rm_file(Folders.flake8_intermediate_file) + + +@power_session(python=[PY37]) +def docs(session: PowerSession): + """Generates the doc and serves it on a local http server. Pass '-- build' to build statically instead.""" + + session.install_reqs(phase="docs", phase_reqs=["mkdocs-material", "mkdocs", "pymdown-extensions", "pygments"]) + + if session.posargs: + # use posargs instead of "serve" + session.run2("mkdocs %s" % " ".join(session.posargs)) + else: + session.run2("mkdocs serve") + + +@power_session(python=[PY37]) +def publish(session: PowerSession): + """Deploy the docs+reports on github pages. Note: this rebuilds the docs""" + + session.install_reqs(phase="mkdocs", phase_reqs=["mkdocs-material", "mkdocs", "pymdown-extensions", "pygments"]) + + # possibly rebuild the docs in a static way (mkdocs serve does not build locally) + session.run2("mkdocs build") + + # check that the doc has been generated with coverage + if not Folders.site_reports.exists(): + raise ValueError("Test reports have not been built yet. Please run 'nox -s tests-3.7' first") + + # publish the docs + session.run2("mkdocs gh-deploy") + + # publish the coverage - now in github actions only + # session.install_reqs(phase="codecov", phase_reqs=["codecov", "keyring"]) + # # keyring set https://app.codecov.io/gh// token + # import keyring # (note that this import is not from the session env but the main nox env) + # codecov_token = keyring.get_password("https://app.codecov.io/gh//>", "token") + # # note: do not use --root nor -f ! 
otherwise "There was an error processing coverage reports" + # session.run2('codecov -t %s -f %s' % (codecov_token, Folders.coverage_xml)) + + +@power_session(python=[PY37]) +def release(session: PowerSession): + """Create a release on github corresponding to the latest tag""" + + # Get current tag using setuptools_scm and make sure this is not a dirty/dev one + from setuptools_scm import get_version # (note that this import is not from the session env but the main nox env) + from setuptools_scm.version import guess_next_dev_version + version = [] + + def my_scheme(version_): + version.append(version_) + return guess_next_dev_version(version_) + current_tag = get_version(".", version_scheme=my_scheme) + + # create the package + session.install_reqs(phase="setup.py#dist", phase_reqs=["setuptools_scm"]) + rm_folder(Folders.dist) + session.run2("python setup.py sdist bdist_wheel") + + if version[0].dirty or not version[0].exact: + raise ValueError("You need to execute this action on a clean tag version with no local changes.") + + # Did we receive a token through positional arguments ? 
(nox -s release -- ) + if len(session.posargs) == 1: + # Run from within github actions - no need to publish on pypi + gh_token = session.posargs[0] + publish_on_pypi = False + + elif len(session.posargs) == 0: + # Run from local commandline - assume we want to manually publish on PyPi + publish_on_pypi = True + + # keyring set https://docs.github.com/en/rest token + import keyring # (note that this import is not from the session env but the main nox env) + gh_token = keyring.get_password("https://docs.github.com/en/rest", "token") + assert len(gh_token) > 0 + + else: + raise ValueError("Only a single positional arg is allowed for now") + + # publish the package on PyPi + if publish_on_pypi: + # keyring set https://upload.pypi.org/legacy/ your-username + # keyring set https://test.pypi.org/legacy/ your-username + session.install_reqs(phase="PyPi", phase_reqs=["twine"]) + session.run2("twine upload dist/* -u smarie") # -r testpypi + + # create the github release + session.install_reqs(phase="release", phase_reqs=["click", "PyGithub"]) + session.run2("python ci_tools/github_release.py -s {gh_token} " + "--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " + "-d https://{gh_org}.github.io/{gh_repo}/changelog {tag}" + "".format(gh_token=gh_token, gh_org=gh_org, gh_repo=gh_repo, tag=current_tag)) + + +@nox.session(python=False) +def gha_list(session): + """(mandatory arg: ) Prints all sessions available for , for GithubActions.""" + + # see https://stackoverflow.com/q/66747359/7262247 + + # get the desired base session to generate the list for + if len(session.posargs) != 1: + raise ValueError("This session has a mandatory argument: ") + session_func = globals()[session.posargs[0]] + + # list all sessions for this base session + try: + session_func.parametrize + except AttributeError: + sessions_list = ["%s-%s" % (session_func.__name__, py) for py in session_func.python] + else: + sessions_list = ["%s-%s(%s)" % (session_func.__name__, py, param) + for py, param in 
product(session_func.python, session_func.parametrize)] + + # print the list so that it can be caught by GHA. + # Note that json.dumps is optional since this is a list of string. + # However it is to remind us that GHA expects a well-formatted json list of strings. + print(dumps(sessions_list)) + + +# if __name__ == '__main__': +# # allow this file to be executable for easy debugging in any IDE +# nox.run(globals()) diff --git a/pyfields/__init__.py b/pyfields/__init__.py index 21dbaf5..ac6386d 100644 --- a/pyfields/__init__.py +++ b/pyfields/__init__.py @@ -1,10 +1,15 @@ +# Authors: Sylvain MARIE +# + All contributors to +# +# License: 3-clause BSD, from .typing_utils import FieldTypeError from .core import field, Field, FieldError, MandatoryFieldInitError, UnsupportedOnNativeFieldError, \ ReadOnlyFieldError, NoneError from .validate_n_convert import Converter, ConversionError, DetailedConversionResults, trace_convert from .init_makers import inject_fields, make_init, init_fields -from .helpers import copy_value, copy_field, copy_attr, has_fields, get_fields, yield_fields, get_field -from .autofields_ import autofields +from .helpers import copy_value, copy_field, copy_attr, has_fields, get_fields, yield_fields, get_field, \ + get_field_values +from .autofields_ import autofields, autoclass try: # Distribution mode : import from _version.py generated by setuptools_scm during release @@ -25,5 +30,6 @@ 'Converter', 'ConversionError', 'DetailedConversionResults', 'trace_convert', 'inject_fields', 'make_init', 'init_fields', 'copy_value', 'copy_field', 'copy_attr', 'has_fields', 'get_fields', 'yield_fields', 'get_field', - 'autofields' + 'get_field_values', + 'autofields', 'autoclass' ] diff --git a/pyfields/autofields_.py b/pyfields/autofields_.py index 321b8fc..d483142 100644 --- a/pyfields/autofields_.py +++ b/pyfields/autofields_.py @@ -1,26 +1,41 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric 
Industries, 2019. All right reserved. +# License: 3-clause BSD, import sys +from copy import deepcopy from inspect import isdatadescriptor, ismethoddescriptor try: - from typing import Union, Callable, Type, Any, TypeVar + from typing import Union, Callable, Type, Any, TypeVar, Tuple, Iterable DecoratedClass = TypeVar("DecoratedClass", bound=Type[Any]) except ImportError: pass -from pyfields import Field, field, make_init as mkinit, copy_value +from .core import Field, field +from .init_makers import make_init as mkinit +from .helpers import copy_value, get_fields PY36 = sys.version_info >= (3, 6) +DEFAULT_EXCLUDED = ('_abc_impl',) -def autofields(check_types=False, # type: Union[bool, DecoratedClass] - include_upper=False, # type: bool - include_dunder=False, # type: bool - make_init=True # type: bool +def _make_init(cls): + """Utility method used in autofields and autoclass to create the constructor based on the class fields""" + if "__init__" not in cls.__dict__: + new_init = mkinit() + cls.__init__ = new_init + # attach explicitly to the class so that the descriptor is correctly completed. + new_init.__set_name__(cls, '__init__') + + +def autofields(check_types=False, # type: Union[bool, DecoratedClass] + include_upper=False, # type: bool + include_dunder=False, # type: bool + exclude=DEFAULT_EXCLUDED, # type: Iterable[str] + make_init=True, # type: bool ): # type: (...) -> Union[Callable[[DecoratedClass], DecoratedClass], DecoratedClass] """ @@ -66,14 +81,16 @@ def autofields(check_types=False, # type: Union[bool, DecoratedClass] TypeError: __init__() got an unexpected keyword argument 'SENTENCE' - :param check_types: boolean flag (default: False) indicating the value of `check_type` for created fields. Note that - the type hint of each created field is copied from the type hint of the member it originates from. - :param include_upper: boolean flag (default: False) indicating whether upper-case class members should be also - transformed to fields. 
- :param include_dunder: boolean flag (default: False) indicating whether dunder-named class members should be also + :param check_types: boolean flag (default: `False`) indicating the value of `check_type` for created fields. Note + that the type hint of each created field is copied from the type hint of the member it originates from. + :param include_upper: boolean flag (default: `False`) indicating whether upper-case class members should be also + transformed to fields (usually such names are reserved for class constants, not for fields). + :param include_dunder: boolean flag (default: `False`) indicating whether dunder-named class members should be also transformed to fields. Note that even if you set this to True, members with reserved python dunder names will not be transformed. See `is_reserved_dunder` for the list of reserved names. - :param make_init: boolean flag (default: True) indicating whether a constructor should be created for the class if + :param exclude: a tuple of field names that should be excluded from automatic creation. By default this is set to + `DEFAULT_EXCLUDED`, which eliminates fields created by `ABC`. + :param make_init: boolean flag (default: `True`) indicating whether a constructor should be created for the class if no `__init__` method is present. Such constructor will be created using `__init__ = make_init()`. :return: """ @@ -82,10 +99,12 @@ def _autofields(cls): try: # Are type hints present ? - cls_annotations = cls.__annotations__ + # note: since this attribute can be inherited, we get the own attribute only + # cls_annotations = cls.__annotations__ + cls_annotations = getownattr(cls, "__annotations__") except AttributeError: - # No type hints: shortcut - members_defs = ((k, None, v) for k, v in cls.__dict__.items()) + # No type hints: shortcut. 
note: do not return a generator since we'll modify __dict__ in the loop after + members_defs = tuple((k, None, v) for k, v in cls.__dict__.items()) else: # Fill the list of potential fields definitions members_defs = [] @@ -151,7 +170,10 @@ def v_gen(): # Main loop : for each member, possibly create a field() for member_name, type_hint, default_value in members_defs: - if not include_upper and member_name == member_name.upper(): + if member_name in exclude: + # excluded explicitly + continue + elif not include_upper and member_name == member_name.upper(): # excluded uppercase continue elif (include_dunder and is_reserved_dunder(member_name)) \ @@ -160,6 +182,12 @@ def v_gen(): continue elif isinstance(default_value, Field): # already a field, no need to create + # but in order to preserve relative order with generated fields, detach and attach again + try: + delattr(cls, member_name) + except AttributeError: + pass + setattr(cls, member_name, default_value) continue elif isinstance(default_value, property) or isdatadescriptor(default_value) \ or ismethoddescriptor(default_value): @@ -177,18 +205,27 @@ def v_gen(): new_field = field(check_type=need_to_check_type) else: # optional field : copy the default value by default - new_field = field(check_type=need_to_check_type, default_factory=copy_value(default_value)) + try: + # autocheck: make sure that we will be able to create copies later + deepcopy(default_value) + except Exception as e: + raise ValueError("The provided default value for field %r=%r can not be deep-copied: " + "caught error %r" % (member_name, default_value, e)) + new_field = field(check_type=need_to_check_type, + default_factory=copy_value(default_value, autocheck=False)) - # Attach the newly created field to the class + # Attach the newly created field to the class. Delete attr first so that order is preserved + # even if one of them had only an annotation. 
+ try: + delattr(cls, member_name) + except AttributeError: + pass setattr(cls, member_name, new_field) new_field.set_as_cls_member(cls, member_name, type_hint=type_hint) # Finally, make init if not already explicitly present - if make_init and ('__init__' not in cls.__dict__): - new_init = mkinit() - cls.__init__ = new_init - # attach explicitly to the class so that the descriptor is correctly completed. - new_init.__set_name__(cls, '__init__') + if make_init: + _make_init(cls) return cls # end of _autofields(cls) @@ -214,3 +251,331 @@ def is_dunder(name): def is_reserved_dunder(name): return name in ('__doc__', '__name__', '__qualname__', '__module__', '__code__', '__globals__', '__dict__', '__closure__', '__annotations__') # '__defaults__', '__kwdefaults__') + + +_dict, _hash = dict, hash +"""Aliases for autoclass body""" + + +def autoclass( + # --- autofields + fields=True, # type: Union[bool, DecoratedClass] + typecheck=False, # type: bool + # --- constructor + init=True, # type: bool + # --- class methods + dict=True, # type: bool + dict_public_only=True, # type: bool + repr=True, # type: bool + repr_curly_mode=False, # type: bool + repr_public_only=True, # type: bool + eq=True, # type: bool + eq_public_only=False, # type: bool + hash=True, # type: bool + hash_public_only=False, # type: bool + # --- advanced + af_include_upper=False, # type: bool + af_include_dunder=False, # type: bool + af_exclude=DEFAULT_EXCLUDED, # type: Iterable[str] + ac_include=None, # type: Union[str, Tuple[str]] + ac_exclude=None, # type: Union[str, Tuple[str]] +): + """ + A decorator to automate many things at once for your class. + + First if `fields=True` (default) it executes `@autofields` to generate fields from attribute defined at class + level. 
+ + - you can include attributes with dunder names or uppercase names with `af_include_dunder` and + `af_include_upper` respectively + - you can enable type checking on all fields at once by setting `check_types=True` + - the constructor is not generated at this stage + + Then it generates methods for the class: + + - if `init=True` (default) it generates the constructor based on all fields present, using `make_init()`. + - if `dict=True` (default) it generates `to_dict` and `from_dict` methods. Only public fields are represented in + `to_dict`, you can change this with `dict_public_only=False`. + - if `repr=True` (default) it generates a `__repr__` method. Only public fields are represented, you can change + this with `repr_public_only=False`. + - if `eq=True` (default) it generates an `__eq__` method, so that instances can be compared to other instances and + to dicts. All fields are compared by default, you can change this with `eq_public_only=True`. + - if `hash=True` (default) it generates an `__hash__` method, so that instances can be inserted in sets or dict + keys. All fields are hashed by default, you can change this with `hash_public_only=True`. + + You can specify an explicit list of fields to include or exclude in the dict/repr/eq/hash methods with the + `ac_include` and `ac_exclude` parameters. + + Note that this decorator is similar to the [autoclass library](https://smarie.github.io/python-autoclass/) but is + reimplemented here. In particular the parameter names and dictionary behaviour are different. + + :param fields: boolean flag (default: True) indicating whether to create fields automatically. See `@autofields` + for details + :param typecheck: boolean flag (default: False) used when fields=True indicating the value of `check_type` + for created fields. Note that the type hint of each created field is copied from the type hint of the member it + originates from. 
+ :param init: boolean flag (default: True) indicating whether a constructor should be created for the class if + no `__init__` method is already present. Such constructor will be created using `__init__ = make_init()`. + This is the same behaviour than `make_init` in `@autofields`. Note that this is *not* automatically disabled if + you set `fields=False`. + :param dict: a boolean to automatically create `cls.from_dict(dct)` and `obj.to_dict()` methods on the class + (default: True). + :param dict_public_only: a boolean (default: True) to indicate if only public fields should be + exposed in the dictionary view created by `to_dict` when `dict=True`. + :param repr: a boolean (default: True) to indicate if `__repr__` and `__str__` should be created for the class if + not explicitly present. + :param repr_curly_mode: a boolean (default: False) to turn on an alternate string representation when `repr=True`, + using curly braces. + :param repr_public_only: a boolean (default: True) to indicate if only public fields should be + exposed in the string representation when `repr=True`. + :param eq: a boolean (default: True) to indicate if `__eq__` should be created for the class if not explicitly + present. + :param eq_public_only: a boolean (default: False) to indicate if only public fields should be + compared in the equality method created when `eq=True`. + :param hash: a boolean (default: True) to indicate if `__hash__` should be created for the class if not explicitly + present. + :param hash_public_only: a boolean (default: False) to indicate if only public fields should be + hashed in the hash method created when `hash=True`. + :param af_include_upper: boolean flag (default: False) used when autofields=True indicating whether + upper-case class members should be also transformed to fields (usually such names are reserved for class + constants, not for fields). 
+ :param af_include_dunder: boolean flag (default: False) used when autofields=True indicating whether + dunder-named class members should be also transformed to fields. Note that even if you set this to True, + members with reserved python dunder names will not be transformed. See `is_reserved_dunder` for the list of + reserved names. + :param af_exclude: a tuple of explicit attribute names to exclude from automatic fields creation. See + `@autofields(exclude=...)` for details. + :param ac_include: a tuple of explicit attribute names to include in dict/repr/eq/hash (None means all) + :param ac_exclude: a tuple of explicit attribute names to exclude in dict/repr/eq/hash. In such case, + include should be None. + :return: + """ + if not fields and (af_include_dunder or af_include_upper or typecheck): + raise ValueError("Not able to set af_include_dunder or af_include_upper or typecheck when fields=False") + + # switch between args and actual symbols for readability + dict_on = dict + dict = _dict + hash_on = hash + hash = _hash + + # Create the decorator function + def _apply_decorator(cls): + + # create fields automatically + if fields: + cls = autofields(check_types=typecheck, include_upper=af_include_upper, + exclude=af_exclude, include_dunder=af_include_dunder, make_init=False)(cls) + + # make init if not already explicitly present + if init: + _make_init(cls) + + # list all fields + all_pyfields = get_fields(cls) + if len(all_pyfields) == 0: + raise ValueError("No fields detected on class %s (including inherited ones)" % cls) + + # filter selected + all_names = tuple(f.name for f in all_pyfields) + selected_names = filter_names(all_names, include=ac_include, exclude=ac_exclude, caller="@autoclass") + public_selected_names = tuple(n for n in selected_names if not n.startswith('_')) + + # to/from dict + if dict_on: + dict_names = public_selected_names if dict_public_only else selected_names + if "to_dict" not in cls.__dict__: + + def to_dict(self): + """ 
Generated by @pyfields.autoclass based on the class fields """ + return {n: getattr(self, n) for n in dict_names} + + cls.to_dict = to_dict + if "from_dict" not in cls.__dict__: + + def from_dict(cls, dct): + """ Generated by @pyfields.autoclass """ + return cls(**dct) + + cls.from_dict = classmethod(from_dict) + + # __str__ and __repr__ + if repr: + repr_names = public_selected_names if repr_public_only else selected_names + if not repr_curly_mode: # default + + def __repr__(self): + """ Generated by @pyfields.autoclass based on the class fields """ + return '%s(%s)' % (self.__class__.__name__, + ', '.join('%s=%r' % (k, getattr(self, k)) for k in repr_names)) + else: + def __repr__(self): + """ Generated by @pyfields.autoclass based on the class fields """ + return '%s(**{%s})' % (self.__class__.__name__, + ', '.join('%r: %r' % (k, getattr(self, k)) for k in repr_names)) + + if "__repr__" not in cls.__dict__: + cls.__repr__ = __repr__ + if "__str__" not in cls.__dict__: + cls.__str__ = __repr__ + + # __eq__ + if eq: + eq_names = public_selected_names if eq_public_only else selected_names + + def __eq__(self, other): + """ Generated by @pyfields.autoclass based on the class fields """ + if isinstance(other, dict): + # comparison with dicts only when a to_dict method is available + try: + _self_to_dict = self.to_dict + except AttributeError: + return False + else: + return _self_to_dict() == other + elif isinstance(self, other.__class__): + # comparison with objects of the same class or a parent + try: + for att_name in eq_names: + if getattr(self, att_name) != getattr(other, att_name): + return False + except AttributeError: + return False + else: + return True + elif isinstance(other, self.__class__): + # other is a subtype: call method on other + return other.__eq__(self) # same as NotImplemented ? 
+ else: + # classes are not related: False + return False + + if "__eq__" not in cls.__dict__: + cls.__eq__ = __eq__ + + # __hash__ + if hash_on: + hash_names = public_selected_names if hash_public_only else selected_names + + def __hash__(self): + """ Generated by @autoclass. Implements the __hash__ method by hashing a tuple of field values """ + + # note: Should we prepend a unique hash for the class as `attrs` does ? + # return hash(tuple([type(self)] + [getattr(self, att_name) for att_name in added])) + # > No, it seems more intuitive to not do that. + # Warning: the consequence is that instances of subtypes will have the same hash has instance of their + # parent class if they have all the same attribute values + + return hash(tuple(getattr(self, att_name) for att_name in hash_names)) + + if "__hash__" not in cls.__dict__: + cls.__hash__ = __hash__ + + return cls + + # Apply: Decorator vs decorator factory logic + if isinstance(fields, type): + # called without parenthesis: directly apply decorator on first argument + cls = fields + fields = True # set it back to its default value + return _apply_decorator(cls) + else: + # called with parenthesis: return a decorator function + return _apply_decorator + + +def filter_names(all_names, + include=None, # type: Union[str, Tuple[str]] + exclude=None, # type: Union[str, Tuple[str]] + caller="" # type: str + ): + # type: (...) 
-> Iterable[str] + """ + Common validator for include and exclude arguments + + :param all_names: + :param include: + :param exclude: + :param caller: + :return: + """ + if include is not None and exclude is not None: + raise ValueError("Only one of 'include' or 'exclude' argument should be provided.") + + # check that include/exclude don't contain names that are incorrect + selected_names = all_names + if include is not None: + if exclude is not None: + raise ValueError('Only one of \'include\' or \'exclude\' argument should be provided.') + + # get the selected names and check that all names in 'include' are actually valid names + included = (include,) if isinstance(include, str) else tuple(include) + incorrect = set(included) - set(all_names) + if len(incorrect) > 0: + raise ValueError("`%s` definition exception: `include` contains %r that is/are " + "not part of %r" % (caller, incorrect, all_names)) + selected_names = included + + elif exclude is not None: + excluded_set = {exclude} if isinstance(exclude, str) else set(exclude) + incorrect = excluded_set - set(all_names) + if len(incorrect) > 0: + raise ValueError("`%s` definition exception: exclude contains %r that is/are " + "not part of %r" % (caller, incorrect, all_names)) + selected_names = tuple(n for n in all_names if n not in excluded_set) + + return selected_names + + +# def method_already_there(cls, +# method_name, # type: str +# this_class_only=False # type: bool +# ): +# # type: (...) -> bool +# """ +# Returns True if method `method_name` is already implemented by object_type, that is, its implementation differs +# from the one in `object`. 
+# +# :param cls: +# :param method_name: +# :param this_class_only: +# :return: +# """ +# if this_class_only: +# return method_name in cls.__dict__ # or vars(cls) +# else: +# method = getattr(cls, method_name, None) +# return method is not None and method is not getattr(object, method_name, None) + + +def getownattr(cls, attrib_name): + """ + Return the value of `cls.` if it is defined in the class (and not inherited). + If the attribute is not present or is inherited, an `AttributeError` is raised. + + >>> class A(object): + ... a = 1 + >>> + >>> class B(A): + ... pass + >>> + >>> getownattr(A, 'a') + 1 + >>> getownattr(A, 'unknown') + Traceback (most recent call last): + ... + AttributeError: type object 'A' has no attribute 'unknown' + >>> getownattr(B, 'a') + Traceback (most recent call last): + ... + AttributeError: type object 'B' has no directly defined attribute 'a' + + """ + attr = getattr(cls, attrib_name) + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + raise AttributeError("type object %r has no directly defined attribute %r" % (cls.__name__, attrib_name)) + + return attr diff --git a/pyfields/core.py b/pyfields/core.py index 9be5421..01ff649 100644 --- a/pyfields/core.py +++ b/pyfields/core.py @@ -1,7 +1,7 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. 
- +# License: 3-clause BSD, import sys from enum import Enum from textwrap import dedent @@ -11,16 +11,16 @@ from inspect import signature, Parameter except ImportError: # noinspection PyUnresolvedReferences,PyPackageRequirements - from funcsigs import signature, Parameter + from funcsigs import signature, Parameter # noqa from valid8 import ValidationFailure, is_pep484_nonable -from pyfields.typing_utils import assert_is_of_type, FieldTypeError +from pyfields.typing_utils import assert_is_of_type, FieldTypeError, get_type_hints from pyfields.validate_n_convert import FieldValidator, make_converters_list, trace_convert try: # python 3.5+ # noinspection PyUnresolvedReferences - from typing import List, Callable, Type, Any, Union, Iterable, Tuple, TypeVar + from typing import Callable, Type, Any, Union, Iterable, Tuple, TypeVar _NoneType = type(None) use_type_hints = sys.version_info > (3, 0) if use_type_hints: @@ -37,13 +37,6 @@ PY36 = sys.version_info >= (3, 6) -get_type_hints = None -if PY36: - try: - from typing import get_type_hints - except ImportError: - pass - PY2 = sys.version_info < (3, 0) # PY35 = sys.version_info >= (3, 5) @@ -113,6 +106,7 @@ class Symbols(Enum): EMPTY = 2 # type: Any USE_FACTORY = 3 _unset = 4 + DELAYED = 5 def __repr__(self): """ More compact representation for signatures readability""" @@ -127,6 +121,7 @@ def __repr__(self): # EMPTY = sentinel.create('empty') EMPTY = Symbols.EMPTY +DELAYED = Symbols.DELAYED # USE_FACTORY = sentinel.create('use_factory') USE_FACTORY = Symbols.USE_FACTORY @@ -258,16 +253,19 @@ def set_as_cls_member(self, self.name = name # if not already manually overridden, get the type hints if there are some in the owner class annotations - if self.type_hint is EMPTY: + if self.type_hint is EMPTY or self.type_hint is DELAYED: + # first reconciliate both ways to get the hint if owner_cls_type_hints is not None: if type_hint is not None: raise ValueError("Provide either owner_cls_type_hints or type_hint, not both") 
type_hint = owner_cls_type_hints.get(name) + # then use it if type_hint is not None: # only use type hint if not empty self.type_hint = type_hint - # update the 'nonable' status + # update the 'nonable' status - only if not already explicitly set. + # note: if this is UNKNOWN, we already saw that self.default is not None. No need to check again. if self.nonable is UNKNOWN: if is_pep484_nonable(type_hint): self.nonable = True @@ -296,8 +294,17 @@ def __set_name__(self, ): if owner is not None: # fill all the information about how it is attached to the class - cls_type_hints = get_type_hints(owner) - self.set_as_cls_member(owner, name, owner_cls_type_hints=cls_type_hints) + # resolve type hint strings and get "optional" type hint automatically + # note: we need to pass an appropriate local namespace so that forward refs work. + # this seems like a bug in `get_type_hints` ? + try: + cls_type_hints = get_type_hints(owner) + except NameError: + # probably an issue of forward reference, or PEP563 is activated. Delay checking for later + self.set_as_cls_member(owner, name, type_hint=DELAYED) + else: + # nominal usage + self.set_as_cls_member(owner, name, owner_cls_type_hints=cls_type_hints) @property def qualname(self): @@ -701,7 +708,7 @@ def field(type_hint=None, # type: Union[Type[T], Iterable[Type[T]]] - @lazy_attribute (sagemath) - @cached_property (werkzeug) and https://stackoverflow.com/questions/24704147/python-what-is-a-lazy-property - - https://stackoverflow.com/questions/42023852/how-can-i-get-the-attribute-name-when-working-with-descriptor-protocol-in-python + - https://stackoverflow.com/q/42023852/7262247 - attrs / dataclasses :param type_hint: an optional explicit type hint for the field, to override the type hint defined by PEP484 @@ -711,8 +718,8 @@ def field(type_hint=None, # type: Union[Type[T], Iterable[Type[T]]] :param nonable: a boolean that can be used to explicitly declare that a field can contain `None`. 
When this is set to an explicit `True` or `False` value, usual type checking and validation (*if any*) are not anymore executed on `None` values. Instead ; if this is `True`, type checking and validation will be *deactivated* when the field - is set to `None` so as to always accept the value. If this is `False`, an `None`error will be raised when `None` is - set on the field. + is set to `None` so as to always accept the value. If this is `False`, an `None`error will be raised when `None` + is set on the field. When this is left as `GUESS` (default), the behaviour is "automatic". This means that - if the field (a) is optional with default value `None` or (b) has type hint `typing.Optional[]`, the behaviour will be the same as with `nonable=True`. @@ -809,18 +816,13 @@ def __get__(self, obj, obj_type): # do this first, because a field might be referenced from its class the first time it will be used # for example if in `make_init` we use a field defined in another class, that was not yet accessed on instance. - if not PY36 and self.name is None: + if self.name is None or self.type_hint is DELAYED: # __set_name__ was not called yet. lazy-fix the name and type hints fix_field(obj_type, self) if obj is None: - # class-level call ? - # TODO put back when https://youtrack.jetbrains.com/issue/PY-38151 is solved - # return self - # even this does not work - # exec("o = self", globals(), locals()) - # return locals()['o'] - raise ClassFieldAccessError(self) + # class-level call: https://youtrack.jetbrains.com/issue/PY-38151 is solved, we can now return self + return self # Check if the field is already set in the object __dict__ value = obj.__dict__.get(self.name, _unset) @@ -998,15 +1000,13 @@ def __get__(self, obj, obj_type): # do this first, because a field might be referenced from its class the first time it will be used # for example if in `make_init` we use a field defined in another class, that was not yet accessed on instance. 
- if not PY36 and self.name is None: + if self.name is None or self.type_hint is DELAYED: # __set_name__ was not called yet. lazy-fix the name and type hints fix_field(obj_type, self) if obj is None: - # class-level call ? - # TODO put back when https://youtrack.jetbrains.com/issue/PY-38151 is solved - # return self - raise ClassFieldAccessError(self) + # class-level call: https://youtrack.jetbrains.com/issue/PY-38151 is solved, we can now return self + return self private_name = '_' + self.name @@ -1066,15 +1066,14 @@ def __set__(self, # do this first, because a field might be referenced from its class the first time it will be used # for example if in `make_init` we use a field defined in another class, that was not yet accessed on instance. - if not PY36 and self.name is None: + if self.name is None or self.type_hint is DELAYED: # __set_name__ was not called yet. lazy-fix the name and type hints fix_field(obj.__class__, self) - if obj is None: - # class-level call ? - # TODO put back when https://youtrack.jetbrains.com/issue/PY-38151 is solved - # return self - raise ClassFieldAccessError(self) + # if obj is None: + # # class-level call: this never happens + # # https://youtrack.jetbrains.com/issue/PY-38151 is solved, but what do we wish to do here actually ? + # raise ClassFieldAccessError(self) if self.converters is not None: # this is an inlined version of `trace_convert` with no capture of details @@ -1083,7 +1082,7 @@ def __set__(self, try: # does the converter accept this input ? 
accepted = converter.accepts(obj, self, value) - except Exception: + except Exception: # noqa # ignore all exceptions from converters continue else: @@ -1091,7 +1090,7 @@ def __set__(self, # if so, let's try to convert try: converted_value = converter.convert(obj, self, value) - except Exception: + except Exception: # noqa # ignore all exceptions from converters continue else: @@ -1118,10 +1117,10 @@ def __set__(self, # check the type if self.check_type: if t is EMPTY: - raise ValueError("`check_type` is enabled on field '%s' but no type hint is available. Please provide" - "type hints or set `field.check_type` to `False`. Note that python code is not able to" - " read type comments so if you wish to be compliant with python < 3.6 you'll have to" - "set the type hint explicitly in `field.type_hint` instead") + raise ValueError("`check_type` is enabled on field '%s' but no type hint is available. Please " + "provide type hints or set `field.check_type` to `False`. Note that python code is" + " not able to read type comments so if you wish to be compliant with python < 3.6 " + "you'll have to set the type hint explicitly in `field.type_hint` instead") if USE_ADVANCED_TYPE_CHECKER: # take into account all the subtleties from `typing` module by relying on 3d party providers. diff --git a/pyfields/helpers.py b/pyfields/helpers.py index d418aea..9b30b0c 100644 --- a/pyfields/helpers.py +++ b/pyfields/helpers.py @@ -1,8 +1,10 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. 
+# License: 3-clause BSD, +import sys from copy import copy, deepcopy -from inspect import getmro +from inspect import getmro, isclass try: from typing import Union, Type, TypeVar @@ -51,6 +53,7 @@ def yield_fields(cls, include_inherited=True, # type: bool remove_duplicates=True, # type: bool ancestors_first=True, # type: bool + public_only=False, # type: bool _auto_fix_fields=False # type: bool ): """ @@ -60,6 +63,7 @@ def yield_fields(cls, :param include_inherited: :param remove_duplicates: :param ancestors_first: + :param public_only: :param _auto_fix_fields: :return: """ @@ -97,6 +101,9 @@ def yield_fields(cls, # take this opportunity to set the name and type hints field.set_as_cls_member(_cls, member_name, owner_cls_type_hints=_cls_pep484_member_type_hints) + if public_only and member_name.startswith('_'): + continue + if remove_duplicates: if member_name in _already_found_names: continue @@ -140,29 +147,87 @@ class or one of its ancestors. If `False`, the fields need to be defined on the return any(yield_fields(cls, include_inherited=include_inherited)) -def get_fields(cls, +if sys.version_info >= (3, 7): + ODict = dict +else: + from collections import OrderedDict + ODict = OrderedDict + + +def get_field_values(obj, + include_inherited=True, # type: bool + remove_duplicates=True, # type: bool + ancestors_first=True, # type: bool + public_only=False, # type: bool + container_type=ODict, # type: Type[T] + _auto_fix_fields=False # type: bool + ): + """ + Utility method to collect all field names and values defined on an object, including all inherited or not. + + By default duplicates are removed and ancestor fields are included and appear first. If a field is overridden, + it will appear at the position of the overridden field in the order. + + The result is an ordered dictionary (a `dict` in python 3.7, an `OrderedDict` otherwise) of {name: value} pairs. 
+ One can change the container type with the `container_type` attribute though, that will receive an iterable of + (key, value) pairs. + + :param obj: + :param include_inherited: + :param remove_duplicates: + :param ancestors_first: + :param public_only: + :param container_type: + :param _auto_fix_fields: + :return: + """ + fields_gen = yield_fields(obj.__class__, include_inherited=include_inherited, public_only=public_only, + remove_duplicates=remove_duplicates, ancestors_first=ancestors_first, + _auto_fix_fields=_auto_fix_fields) + + return container_type((f.name, getattr(obj, f.name)) for f in fields_gen) + + +def safe_isclass(obj # type: object + ): + # type: (...) -> bool + """Ignore any exception via isinstance on Python 3.""" + try: + return isclass(obj) + except Exception: + return False + + +def get_fields(cls_or_obj, include_inherited=True, # type: bool remove_duplicates=True, # type: bool ancestors_first=True, # type: bool + public_only=False, # type: bool container_type=tuple, # type: Type[T] _auto_fix_fields=False # type: bool ): # type: (...) -> T """ - Utility method to collect all fields defined in a class, including all inherited or not. + Utility method to collect all fields defined in a class, including all inherited or not, in definition order. By default duplicates are removed and ancestor fields are included and appear first. If a field is overridden, it will appear at the position of the overridden field in the order. - :param cls: + If an object is provided, `get_fields` will be executed on its class. 
+ + :param cls_or_obj: :param include_inherited: :param remove_duplicates: :param ancestors_first: + :param public_only: :param container_type: :param _auto_fix_fields: :return: the fields (by default, as a tuple) """ - return container_type(yield_fields(cls, include_inherited=include_inherited, + if not safe_isclass(cls_or_obj): + cls_or_obj = cls_or_obj.__class__ + + return container_type(yield_fields(cls_or_obj, include_inherited=include_inherited, public_only=public_only, remove_duplicates=remove_duplicates, ancestors_first=ancestors_first, _auto_fix_fields=_auto_fix_fields)) @@ -185,7 +250,8 @@ def get_fields(cls, def copy_value(val, - deep=True # type: bool + deep=True, # type: bool + autocheck=True # type: bool ): """ Returns a default value factory to be used in a `field(default_factory=...)`. @@ -195,12 +261,28 @@ def copy_value(val, :param val: the (mutable) value to copy :param deep: by default deep copies will be created. You can change this behaviour by setting this to `False` + :param autocheck: if this is True (default), an initial copy will be created when the method is called, so as to + alert the user early if this leads to errors. 
:return: """ if deep: + if autocheck: + try: + # autocheck: make sure that we will be able to create copies later + deepcopy(val) + except Exception as e: + raise ValueError("The provided default value %r can not be deep-copied: caught error %r" % (val, e)) + def create_default(obj): return deepcopy(val) else: + if autocheck: + try: + # autocheck: make sure that we will be able to create copies later + copy(val) + except Exception as e: + raise ValueError("The provided default value %r can not be copied: caught error %r" % (val, e)) + def create_default(obj): return copy(val) diff --git a/pyfields/init_makers.py b/pyfields/init_makers.py index a1d856c..f6f20b8 100644 --- a/pyfields/init_makers.py +++ b/pyfields/init_makers.py @@ -1,6 +1,7 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. +# License: 3-clause BSD, import sys from inspect import isfunction, getmro from itertools import islice @@ -12,8 +13,7 @@ try: # python 3.5+ - from typing import Optional, Set, List, Callable, Dict, Type, Any, TypeVar, Union, Iterable, Tuple, Mapping - from valid8.common_syntax import ValidationFuncs + from typing import List, Callable, Any, Union, Iterable, Tuple use_type_hints = sys.version_info > (3, 0) except ImportError: use_type_hints = False @@ -329,7 +329,8 @@ def __get__(self, obj, objtype): user_init_fun=self.user_init_fun, user_init_args_before=self.user_init_args_before) # replace it forever in the class - setattr(objtype, '__init__', new_init) + # setattr(objtype, '__init__', new_init) + objtype.__init__ = new_init # return the new init return new_init.__get__(obj, objtype) diff --git a/pyfields/init_makers.pyi b/pyfields/init_makers.pyi index b295e32..5ade279 100644 --- a/pyfields/init_makers.pyi +++ b/pyfields/init_makers.pyi @@ -1,3 +1,7 @@ +# Authors: Sylvain MARIE +# + All contributors to +# +# License: 3-clause BSD, from typing import Union, Any, Callable, 
Iterable from pyfields.core import Field diff --git a/pyfields/tests/_test_py36.py b/pyfields/tests/_test_py36.py index 07138bd..9e0c8aa 100644 --- a/pyfields/tests/_test_py36.py +++ b/pyfields/tests/_test_py36.py @@ -4,8 +4,8 @@ import pytest -from typing import List -from pyfields import field, inject_fields, MandatoryFieldInitError, make_init, autofields +from typing import List, Optional +from pyfields import field, inject_fields, MandatoryFieldInitError, make_init, autofields, autoclass def _test_class_annotations(): @@ -169,3 +169,43 @@ class Rectangle: y: PositiveInt return Rectangle + + +def test_issue_74(): + @autofields + class City: + name: Optional[str] + buildings: List[str] = [] + + return City + + +def test_issue_76(): + @autofields + class Foo: + c: int + b: str = "hello" + a: int = field(default=50) + + return Foo + + +def _test_autoclass2(): + @autoclass() + class Foo: + msg: str + age: int = 12 + height: int = field(default=50) + + return Foo + + +def _test_autoclass3(): + + @autoclass(typecheck=True, dict=False) + class Foo: + msg: str + age: int = 12 + height: int = field(default=50) + + return Foo diff --git a/pyfields/tests/issues/_test_py36.py b/pyfields/tests/issues/_test_py36.py index 23a3ae6..e2b3c2a 100644 --- a/pyfields/tests/issues/_test_py36.py +++ b/pyfields/tests/issues/_test_py36.py @@ -1,7 +1,8 @@ # Authors: Sylvain Marie # # Copyright (c) Schneider Electric Industries, 2019. All right reserved. 
-from pyfields import field, init_fields, autofields + +from pyfields import field, init_fields, autofields, autoclass def test_issue_51(): @@ -25,3 +26,37 @@ def balh(self): print('asd') return Frog + + +def test_issue_73(): + class Foo: + bar: 'Foo' = field(check_type=True, nonable=True) + return Foo + + +class A: + bar: 'B' = field(check_type=True, nonable=True) + +class B: + bar: 'A' = field(check_type=True, nonable=True) + + +def test_issue_73_cross_ref(): + # note: we have to define the classes outside the function for the cross-ref to work + # indeed typing.get_type_hints() will only access the globals of the defining module + return A, B + + +def test_issue_81(): + + # note that the issue comes from autofields actually, but it was detected using autoclass + + @autoclass + class A: + a: int = 1 + + @autoclass + class B(A): + b = 0 + + return A, B diff --git a/pyfields/tests/issues/_test_py36_pep563.py b/pyfields/tests/issues/_test_py36_pep563.py new file mode 100644 index 0000000..9691233 --- /dev/null +++ b/pyfields/tests/issues/_test_py36_pep563.py @@ -0,0 +1,21 @@ +from __future__ import annotations # python 3.10 behaviour see https://www.python.org/dev/peps/pep-0563/ +from pyfields import field + + +def test_issue_73(): + class Foo: + bar: Foo = field(check_type=True, nonable=True) + return Foo + + +class A: + bar: B = field(check_type=True, nonable=True) + +class B: + bar: A = field(check_type=True, nonable=True) + + +def test_issue_73_cross_ref(): + # note: we have to define the classes outside the function for the cross-ref to work + # indeed typing.get_type_hints() will only access the globals of the defining module + return A, B diff --git a/pyfields/tests/issues/test_issue_12.py b/pyfields/tests/issues/test_issue_12.py new file mode 100644 index 0000000..563488d --- /dev/null +++ b/pyfields/tests/issues/test_issue_12.py @@ -0,0 +1,19 @@ +import inspect + +from pyfields import field +from pyfields.core import NativeField + + +def 
test_class_access_and_autocomplete(): + """ test that https://github.com/smarie/python-pyfields/issues/12 is resolved """ + class Foo: + a = field(type_hint=int, default=1) + + assert Foo.a.name == 'a' + assert isinstance(Foo.a, NativeField) + assert dict(inspect.getmembers(Foo))['a'] == Foo.a + + f = Foo() + assert f.a == 1 + + Foo.a = 5 diff --git a/pyfields/tests/issues/test_issue_73.py b/pyfields/tests/issues/test_issue_73.py new file mode 100644 index 0000000..e7b6b71 --- /dev/null +++ b/pyfields/tests/issues/test_issue_73.py @@ -0,0 +1,74 @@ +import sys + +import pytest +from pyfields import FieldTypeError + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="class member annotations are not supported in python < 3.6") +@pytest.mark.parametrize('str_hint', [False, True], ids="str_hint={}".format) +@pytest.mark.parametrize('fix_in_class_field', [False, True], ids="fix_in_class_field={}".format) +def test_self_referenced_class(str_hint, fix_in_class_field): + """Fix https://github.com/smarie/python-pyfields/issues/73 """ + if str_hint: + # this is the old behaviour that happens when PEP563 is not enabled at the top of the module + from ._test_py36 import test_issue_73 + Foo = test_issue_73() + else: + # this is the new behaviour that happens when PEP563 is enabled at the top of the module + if sys.version_info < (3, 7): + pytest.skip("python 3.6 does not support PEP563") + from ._test_py36_pep563 import test_issue_73 + Foo = test_issue_73() + + if fix_in_class_field: + # this will read the class fields, and the fix will happen during reading + assert Foo.bar.type_hint is Foo + + # if the fix was not done before, it is done when the field is first used + f = Foo() + with pytest.raises(FieldTypeError): + f.bar = 1 + + f.bar = f + assert f.bar is f + + if not fix_in_class_field: + # we can optionally check this now, but the mere fact that the above worked is already a proof + assert Foo.bar.type_hint is Foo + + +@pytest.mark.skipif(sys.version_info < (3, 6), 
reason="class member annotations are not supported in python < 3.6") +@pytest.mark.parametrize('str_hint', [False, True], ids="str_hint={}".format) +@pytest.mark.parametrize('fix_in_class_field', [False, True], ids="fix_in_class_field={}".format) +def test_cross_referenced_class(str_hint, fix_in_class_field): + if str_hint: + # this is the old behaviour that happens when PEP563 is not enabled at the top of the module + from ._test_py36 import test_issue_73_cross_ref + A, B = test_issue_73_cross_ref() + else: + # this is the new behaviour that happens when PEP563 is enabled at the top of the module + if sys.version_info < (3, 7): + pytest.skip("python 3.6 does not support PEP563") + from ._test_py36_pep563 import test_issue_73_cross_ref + A, B = test_issue_73_cross_ref() + + if fix_in_class_field: + # this will read the class fields, and the fix will happen during reading + assert A.bar.type_hint is B + assert B.bar.type_hint is A + + # if the fix was not done before, it is done when the field is first used + a = A() + with pytest.raises(FieldTypeError): + a.bar = 1 + + b = B() + a.bar = b + b.bar = a + assert a.bar is b + assert b.bar is a + + if not fix_in_class_field: + # we can optionally check this now, but the mere fact that the above worked is already a proof + assert A.bar.type_hint is B + assert B.bar.type_hint is A diff --git a/pyfields/tests/issues/test_issue_81.py b/pyfields/tests/issues/test_issue_81.py new file mode 100644 index 0000000..c28463a --- /dev/null +++ b/pyfields/tests/issues/test_issue_81.py @@ -0,0 +1,18 @@ +# Authors: Sylvain MARIE +# + All contributors to +# +# License: 3-clause BSD, +import sys +import pytest + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="class member annotations are not supported in python < 3.6") +def test_issue_81(): + """ See https://github.com/smarie/python-pyfields/issues/81 """ + from ._test_py36 import test_issue_81 + A, B = test_issue_81() + + # before the bug fix, B.a was mistakenyl recreated py 
autofields as an overridden mandatory field on B + assert B.a.is_mandatory is False + # this was therefore raising a "Missing required positional argument" error on the generated constructor + B(b=3) diff --git a/pyfields/tests/issues/test_issue_84.py b/pyfields/tests/issues/test_issue_84.py new file mode 100644 index 0000000..297d3fa --- /dev/null +++ b/pyfields/tests/issues/test_issue_84.py @@ -0,0 +1,89 @@ +import sys + +import pytest + +try: + from abc import ABC +except ImportError: + from abc import ABCMeta + + class ABC: + __metaclass__ = ABCMeta + + +from pyfields import autofields, field, copy_value, autoclass + + +@pytest.mark.skipif(sys.version_info < (3,), reason="This test does not yet reproduce the exception in python 2") +@pytest.mark.parametrize("auto,deep", [(False, False), (False, True), (True, None)]) +def test_issue_deepcopy_autofields(auto, deep): + """Make sure that """ + + class NotCopiable(object): + def __deepcopy__(self, memodict={}): + raise NotImplementedError() + + def __copy__(self): + raise NotImplementedError() + + default_value = NotCopiable() + + if auto: + with pytest.raises(ValueError) as exc_info: + @autofields + class Foo: + a = default_value + assert str(exc_info.value).startswith("The provided default value for field 'a'=%r can not be deep-copied" + % (default_value, )) + else: + with pytest.raises(ValueError) as exc_info: + class Foo: + a = field(default_factory=copy_value(default_value, deep=deep)) + + extra = "deep-" if deep else "" + assert str(exc_info.value).startswith("The provided default value %r can not be %scopied" + % (default_value, extra)) + + +def test_issue_84_autofields(): + """Make sure that the _abc_impl field from ABC is excluded automatically""" + + @autofields + class Foo(ABC): + a = 0 + + g = Foo() + assert g.a == 0 + + if sys.version_info < (3, 7): + # errors below wont be raised anyway + return + + with pytest.raises(ValueError) as exc_info: + @autofields(exclude=()) + class Foo(ABC): + a = 0 + + 
assert str(exc_info.value).startswith("The provided default value for field '_abc_impl'=") + + +def test_issue_84_autoclass(): + """Make sure that the _abc_impl field from ABC is excluded automatically""" + + @autoclass + class Foo(ABC): + a = 0 + + f = Foo() + assert str(f) == "Foo(a=0)" + + if sys.version_info < (3, 7): + # errors below wont be raised anyway + return + + with pytest.raises(ValueError) as exc_info: + @autoclass(af_exclude=()) + class Foo(ABC): + a = 0 + + assert str(exc_info.value).startswith("The provided default value for field '_abc_impl'=") diff --git a/pyfields/tests/test_autofields.py b/pyfields/tests/test_autofields.py index 83c6f42..d936c30 100644 --- a/pyfields/tests/test_autofields.py +++ b/pyfields/tests/test_autofields.py @@ -5,7 +5,7 @@ import pytest -from pyfields import autofields, field, FieldTypeError, Field, get_fields +from pyfields import autofields, field, FieldTypeError, Field, get_fields, autoclass from pyfields.core import NativeField @@ -97,3 +97,95 @@ def __set__(self, instance, value): fields = get_fields(Foo) assert len(fields) == 1 assert fields[0].name == 'foo' + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="Annotations not supported in python < 3.6") +def test_issue_74(): + """test associated with the non-issue 74""" + from ._test_py36 import test_issue_74 + City = test_issue_74() + c = City(name=None) + assert c.name is None + assert c.buildings == [] + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="Annotations not supported in python < 3.6") +def test_issue_76(): + """ order issue 76 and 77 are fixed """ + from ._test_py36 import test_issue_76 + Foo = test_issue_76() + assert [f.name for f in get_fields(Foo)] == ['c', 'b', 'a'] + + +def test_issue_76_bis(): + """ another order issue with @autofields """ + + @autofields + class Foo(object): + msg = field(type_hint=str) + age = field(default=12, type_hint=int) + + assert [f.name for f in get_fields(Foo)] == ['msg', 'age'] + + +def 
test_autoclass(): + """""" + + @autoclass + class Foo(object): + msg = field(type_hint=str) + age = field(default=12, type_hint=int) + + f = Foo('hey') + + # str repr + assert repr(f) == "Foo(msg='hey', age=12)" + assert str(f) == repr(f) + + # dict and eq + assert f.to_dict() == {'msg': 'hey', 'age': 12} + + same_dict = {'msg': 'hey', 'age': 12} + assert f == same_dict + assert f == Foo.from_dict(same_dict) + + diff_dict = {'age': 13, 'msg': 'hey'} + assert f != diff_dict + assert f != Foo.from_dict(diff_dict) + + assert f == Foo.from_dict(f.to_dict()) + + # hash + my_set = {f, f} + assert my_set == {f} + assert Foo('hey') in my_set + my_set.remove(Foo('hey')) + assert len(my_set) == 0 + + # subclass A + class Bar(Foo): + pass + + b = Bar(msg='hey') + assert str(b) == "Bar(msg='hey', age=12)" + assert b == f + assert f == b + + # hash + my_set = {f, b} + assert len(my_set) == 1 # yes: since the subclass does not define additional attributes. + assert my_set == {f} + + # subclass B + @autoclass + class Bar2(Foo): + ho = 3 + + b2 = Bar2('hey') + assert str(b2) == "Bar2(msg='hey', age=12, ho=3)" + assert b2 != f + assert f != b2 + + # hash + my_set = {b2, b} + assert Bar2('hey') in my_set diff --git a/pyfields/tests/test_core.py b/pyfields/tests/test_core.py index 33a507d..eca8e1a 100644 --- a/pyfields/tests/test_core.py +++ b/pyfields/tests/test_core.py @@ -72,7 +72,7 @@ class Tweety(object): with pytest.raises(ReadOnlyFieldError) as exc_info: t.afraid = False - qualname = Tweety.__dict__['afraid'].qualname + qualname = Tweety.afraid.qualname assert str(exc_info.value) == "Read-only field '%s' has already been " \ "initialized on instance %s and cannot be modified anymore." 
% (qualname, t) @@ -110,7 +110,7 @@ class WithSlots(object): w.a = 1 assert w.a == 1 - assert repr(WithSlots.__dict__['a']) == "" % a_fixed_name + assert repr(WithSlots.a) == "" % a_fixed_name def test_slots2(): @@ -122,6 +122,7 @@ class WithSlots(object): a_name = "test_slots2..WithSlots.a" else: a_name = ".None" + assert repr(WithSlots.__dict__['a']) == "" % a_name @@ -379,7 +380,7 @@ def f_should_be_a_multiple_of_3(self, f_val): def f_should_be_larger_than_g(self, f_val): return f_val > self.g - f_field = Foo.__dict__['f'] + f_field = Foo.f assert len(f_field.root_validator.base_validation_funcs) == 2 foo = Foo() foo.g = 0 @@ -387,11 +388,11 @@ def f_should_be_larger_than_g(self, f_val): foo.f = 2 # assert str(exc_info.value) == "Error validating [%s=2]. " \ # "InvalidValue: Function [f_should_be_a_multiple_of_3] returned [False] for value 2." \ - # % Foo.__dict__['f'].qualname + # % Foo.f.qualname assert str(exc_info.value) == "Error validating [%s=2]. At least one validation function failed for value 2. " \ "Successes: ['f_should_be_larger_than_g'] / " \ "Failures: {'f_should_be_a_multiple_of_3': 'Returned False.'}." \ - % Foo.__dict__['f'].qualname + % Foo.f.qualname foo.f = 3 foo.g = 3 with pytest.raises(ValidationError) as exc_info: @@ -400,7 +401,7 @@ def f_should_be_larger_than_g(self, f_val): "Successes: ['f_should_be_a_multiple_of_3'] / " \ "Failures: {'f_should_be_larger_than_g': " \ "'InvalidValue: not a large enough value. Returned False.'}." \ - % Foo.__dict__['f'].qualname + % Foo.f.qualname def test_validator_not_compliant_with_native_field(): @@ -443,7 +444,7 @@ def f_from_anything(self, f_val): raise Exception("no need to convert! 
already an int") return int(f_val) + 1 - f_field = Foo.__dict__['f'] + f_field = Foo.f assert len(f_field.converters) == 2 foo = Foo() foo.f = 0 # uses no converter at all @@ -576,7 +577,7 @@ class Foo(object): f = field(converters=convs, validators=[x % 3 == 0]) o = Foo() - f_field = Foo.__dict__['f'] + f_field = Foo.f f_converters = f_field.converters assert len(f_converters) == 1 and isinstance(f_converters[0], Converter) o.f = 3 diff --git a/pyfields/tests/test_helpers.py b/pyfields/tests/test_helpers.py new file mode 100644 index 0000000..751c748 --- /dev/null +++ b/pyfields/tests/test_helpers.py @@ -0,0 +1,37 @@ +import pytest + +from pyfields import field, get_field_values, get_fields, copy_field +from pyfields.core import PY36 + + +@pytest.mark.parametrize("a_first", [False, True], ids="ancestor_first={}".format) +@pytest.mark.parametrize("public_only", [False, True], ids="public_only={}".format) +def test_get_fields(a_first, public_only): + class A(object): + a = field() + _d = field(default=5) + + class B(object): + b = field() + + class C(B, A): + a = field(default=None) + c = field(default_factory=copy_field('b')) + + fields = get_fields(C, include_inherited=True, ancestors_first=a_first, + _auto_fix_fields=not PY36, public_only=public_only) + field_names = [f.name for f in fields] + if a_first: + assert field_names == ['a', 'b', 'c'] if public_only else ['a', '_d', 'b', 'c'] + else: + assert field_names == ['a', 'c', 'b'] if public_only else ['a', 'c', 'b', '_d'] + + obj = C() + obj.b = 2 + + fields = get_field_values(obj, ancestors_first=a_first if a_first is not None else True, _auto_fix_fields=not PY36, + container_type=list, public_only=public_only) + if a_first is None or a_first: + assert fields == [('a', None), ('b', 2), ('c', 2)] if public_only else [('a', None), ('_d', 5), ('b', 2), ('c', 2)] + else: + assert fields == [('a', None), ('c', 2), ('b', 2)] if public_only else [('a', None), ('c', 2), ('b', 2), ('_d', 5)] diff --git 
a/pyfields/tests/test_init.py b/pyfields/tests/test_init.py index 3e18273..259f952 100644 --- a/pyfields/tests/test_init.py +++ b/pyfields/tests/test_init.py @@ -148,7 +148,7 @@ def __init__(self): print(vars(c)) -@pytest.mark.parametrize("a_first", [None, False, True], ids="ancestor_first={}".format) +@pytest.mark.parametrize("a_first", [False, True], ids="ancestor_first={}".format) def test_init_order2(a_first): """""" class A(object): @@ -166,10 +166,9 @@ class C(B, A): def __init__(self): pass - fields = get_fields(C, include_inherited=True, ancestors_first=a_first if a_first is not None else True, - _auto_fix_fields=not PY36) + fields = get_fields(C, include_inherited=True, ancestors_first=a_first, _auto_fix_fields=not PY36) field_names = [f.name for f in fields] - if a_first is None or a_first: + if a_first: assert field_names == ['a', 'd', 'b', 'c'] else: assert field_names == ['a', 'c', 'b', 'd'] diff --git a/pyfields/tests/test_readme.py b/pyfields/tests/test_readme.py index ecff5cc..7d2a79f 100644 --- a/pyfields/tests/test_readme.py +++ b/pyfields/tests/test_readme.py @@ -6,11 +6,7 @@ from valid8 import ValidationError, ValidationFailure from pyfields import field, MandatoryFieldInitError, make_init, init_fields, ReadOnlyFieldError, NoneError, \ - FieldTypeError - - -def runs_on_travis(): - return "TRAVIS_PYTHON_VERSION" in os.environ + FieldTypeError, autoclass, get_fields def test_lazy_fields(): @@ -247,7 +243,6 @@ class Foo: a = field() b = field(native=False) - # TODO change when issue with class level access is fixed a_name = "test_native_descriptors..Foo.a" if sys.version_info >= (3, 6) else ".None" b_name = "test_native_descriptors..Foo.b" if sys.version_info >= (3, 6) else ".None" assert repr(Foo.__dict__['a']) == "" % a_name @@ -255,29 +250,30 @@ class Foo: f = Foo() - def set_a(): f.a = 12 + def set_native(): f.a = 12 - def set_b(): f.b = 12 + def set_descript(): f.b = 12 - def set_c(): f.c = 12 + def set_pynative(): f.c = 12 - ta = 
timeit.Timer(set_a).timeit() - tb = timeit.Timer(set_b).timeit() - tc = timeit.Timer(set_c).timeit() + # make sure that the access time for native field and native are identical + # --get rid of the first init since it is a bit longer (replacement of the descriptor with a native field + set_native() + set_descript() + set_pynative() - print("Average time (ns) setting the field:") - print("%0.2f (normal python) ; %0.2f (native field) ; %0.2f (descriptor field)" % (tc, ta, tb)) + # --now compare the executiong= times + t_native = timeit.Timer(set_native).timeit(10000000) + t_descript = timeit.Timer(set_descript).timeit(10000000) + t_pynative = timeit.Timer(set_pynative).timeit(10000000) - print("Ratio is %.2f" % (ta / tc)) + print("Average time (ns) setting the field:") + print("%0.2f (normal python) ; %0.2f (native field) ; %0.2f (descriptor field)" + % (t_pynative, t_native, t_descript)) - # make sure that the access time for native field and native are identical - # for reproducibility on travis, we have to get rid of the first init - if runs_on_travis(): - print("increasing tolerance on travis.") - assert ta / tc <= 2.0 - else: - assert ta / tc <= 1.1 - # assert abs(round(t_field_native * 10) - round(t_native * 10)) <= 1 + ratio = t_native / t_pynative + print("Ratio is %.2f" % ratio) + assert ratio <= 1.2 # def decompose(number): @@ -327,7 +323,7 @@ class Wall: class ColoredWall(Wall): color = field(default='white', doc="Color of the wall.") # type: str - __init__ = make_init(Wall.__dict__['height'], color) + __init__ = make_init(Wall.height, color) w = ColoredWall(2) assert vars(w) == {'color': 'white', 'height': 2} @@ -447,8 +443,6 @@ def test_autofields_vtypes_readme(): def test_autoclass(): """ Tests the example with autoclass in the doc """ - from autoclass import autoclass - @autoclass class Foo(object): msg = field(type_hint=str) @@ -456,10 +450,48 @@ class Foo(object): foo = Foo(msg='hello') + assert [f.name for f in get_fields(Foo)] == ['msg', 'age'] + 
print(foo) # automatic string representation - print(dict(foo)) # dict view + print(foo.to_dict()) # dict view assert str(foo) == "Foo(msg='hello', age=12)" - assert str(dict(foo)) in ("{'msg': 'hello', 'age': 12}", "{'age': 12, 'msg': 'hello'}") + assert str(foo.to_dict()) in ("{'msg': 'hello', 'age': 12}", "{'age': 12, 'msg': 'hello'}") assert foo == Foo(msg='hello', age=12) # comparison (equality) assert foo == {'msg': 'hello', 'age': 12} # comparison with dicts + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="not valid for old python") +def test_autoclass_2(): + from ._test_py36 import _test_autoclass2 + Foo = _test_autoclass2() + + # assert [f.name for f in get_fields(Foo)] == ['msg', 'age', 'height'] + + foo = Foo(msg='hello') + + assert repr(foo) == "Foo(msg='hello', age=12, height=50)" # automatic string representation + assert str(foo.to_dict()) # automatic dict view + + assert foo == Foo(msg='hello', age=12, height=50) # automatic equality comparison + assert foo == {'msg': 'hello', 'age': 12, 'height': 50} # automatic eq comparison with dicts + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason="not valid for old python") +def test_autoclass_3(): + from ._test_py36 import _test_autoclass3 + Foo = _test_autoclass3() + + # assert [f.name for f in get_fields(Foo)] == ['msg', 'age', 'height'] + + foo = Foo(msg='hello') + + with pytest.raises(AttributeError): + foo.to_dict() # method does not exist + + assert repr(foo) == "Foo(msg='hello', age=12, height=50)" # automatic string representation + assert foo == Foo(msg='hello', age=12, height=50) # automatic equality comparison + + # type checking ON + with pytest.raises(FieldTypeError): + foo.msg = 1 diff --git a/pyfields/tests/test_so.py b/pyfields/tests/test_so.py index 2126d31..118c3ec 100644 --- a/pyfields/tests/test_so.py +++ b/pyfields/tests/test_so.py @@ -71,7 +71,7 @@ class Position(object): p.x = 1 with pytest.raises(ValidationError) as exc_info: p.y = 101 - qualname = 
Position.__dict__['y'].qualname + qualname = Position.y.qualname assert str(exc_info.value) == "Error validating [%s=101]. " \ "InvalidValue: y should be between 0 and 100. " \ "Function [] returned [False] for value 101." % qualname @@ -89,7 +89,7 @@ class Spam(object): s = Spam() with pytest.raises(ValidationError) as exc_info: s.description = "" - qualname = Spam.__dict__['description'].qualname + qualname = Spam.description.qualname assert str(exc_info.value) == "Error validating [%s='']. " \ "InvalidValue: description can not be empty. " \ "Function [] returned [False] for value ''." % qualname @@ -116,7 +116,7 @@ def __init__(self, msg="hello world!"): c = Car(color='blue', name='roadie', wheels=3) assert vars(c) == {'_wheels': 3, '_name': 'roadie', '_color': 'blue'} - qualname = Car.__dict__['wheels'].qualname + qualname = Car.wheels.qualname with pytest.raises(TypeError) as exc_info: c.wheels = 'hello' @@ -182,7 +182,7 @@ def __init__(self, msg="hello world!"): p = Position(x=1, y=12) with pytest.raises(TypeError) as exc_info: p.x = '1' - qualname = Position.__dict__['x'].qualname + qualname = Position.x.qualname assert str(exc_info.value) == "Invalid value type provided for '%s'. " \ "Value should be of type %r. Instead, received a 'str': '1'" % (qualname, int) @@ -203,6 +203,6 @@ class User(object): assert vars(u) == {'_username': "earthling"} with pytest.raises(ReadOnlyFieldError) as exc_info: u.username = "earthling2" - qualname = User.__dict__['username'].qualname + qualname = User.username.qualname assert str(exc_info.value) == "Read-only field '%s' has already been initialized on instance %s and cannot be " \ "modified anymore." % (qualname, u) diff --git a/pyfields/typing_utils.py b/pyfields/typing_utils.py index d8e64a1..fed3ca1 100644 --- a/pyfields/typing_utils.py +++ b/pyfields/typing_utils.py @@ -1,6 +1,10 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. 
All right reserved. +# License: 3-clause BSD, +import sys + +from pkg_resources import get_distribution class FieldTypeError(TypeError): # FieldError @@ -16,7 +20,7 @@ def __init__(self, field, value, expected_types): try: if len(expected_types) == 1: expected_types = expected_types[0] - except: + except BaseException: pass self.expected_types = expected_types @@ -42,8 +46,20 @@ def __str__(self): def _make_assert_is_of_type(): + from packaging.version import parse as parse_version try: - from typeguard import check_type + from typeguard import check_type as ct + + # Note: only do this when we are sure that typeguard can be imported, otherwise this is slow + # see https://github.com/smarie/python-getversion/blob/ee495acf6cf06c5e860713edeee396206368e458/getversion/main.py#L84 + typeguard_version = get_distribution("typeguard").version + if parse_version(typeguard_version) < parse_version("3.0.0"): + check_type = ct + else: + # Name has disappeared from 3.0.0 + def check_type(name, value, typ): + ct(value, typ) + try: from typing import Union except ImportError: @@ -72,12 +88,12 @@ def assert_is_of_type(field, value, typ): else: # iterate and try them all e = None - for t in t_gen: + for _t in t_gen: try: check_type(field.qualname, value, typ) return # success !!!! 
- except Exception as e: - pass # failed: lets try another one + except Exception as e1: + e = e1 # failed: lets try another one # raise from if e is not None: @@ -129,7 +145,7 @@ def assert_is_of_type(field, value, typ): raise FieldTypeError(field, value, typ) except ImportError: - from valid8.utils.typing_inspect import is_typevar, is_union_type, get_args + # from valid8.utils.typing_inspect import is_typevar, is_union_type, get_args from valid8.utils.typing_tools import resolve_union_and_typevar def assert_is_of_type(field, value, typ): @@ -162,7 +178,26 @@ def assert_is_of_type(field, value, typ): try: # very minimal way to check if typing it available, for runtime type checking # noinspection PyUnresolvedReferences - from typing import Tuple - assert_is_of_type = _make_assert_is_of_type() + from typing import Tuple # noqa except ImportError: assert_is_of_type = None +else: + assert_is_of_type = _make_assert_is_of_type() + + +PY36 = sys.version_info >= (3, 6) +get_type_hints = None +if PY36: + try: + from typing import get_type_hints as gth + + def get_type_hints(obj, globalns=None, localns=None): + """ + Fixed version of typing.get_type_hints to handle self forward references + """ + if globalns is None and localns is None and isinstance(obj, type): + localns = {obj.__name__: obj} + return gth(obj, globalns=globalns, localns=localns) + + except ImportError: + pass diff --git a/pyfields/validate_n_convert.py b/pyfields/validate_n_convert.py index c794df2..c8022b9 100644 --- a/pyfields/validate_n_convert.py +++ b/pyfields/validate_n_convert.py @@ -1,6 +1,7 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. 
+# License: 3-clause BSD, import sys from collections import OrderedDict @@ -15,7 +16,7 @@ try: # python 3.5+ # noinspection PyUnresolvedReferences from typing import Callable, Type, Any, TypeVar, Union, Iterable, Tuple, Mapping, Optional, Dict, Literal - from valid8.common_syntax import ValidationFuncs + # from valid8.common_syntax import ValidationFuncs use_type_hints = sys.version_info > (3, 0) except ImportError: use_type_hints = False @@ -29,7 +30,7 @@ ValidationFunc = Union[Callable[[Any], Any], Callable[[Any, Any], Any], Callable[[Any, Any, Any], Any]] - """A validation function is a callable with signature (val), (obj, val) or (obj, field, val), returning `True` + """A validation function is a callable with signature (val), (obj, val) or (obj, field, val), returning `True` or `None` in case of success""" try: @@ -55,8 +56,8 @@ Iterable[ValidatorDef], Mapping[VFDefinitionElement, Union[VFDefinitionElement, Tuple[VFDefinitionElement, ...]]]] - """Several validators can be provided as a singleton, iterable, or dict-like. In that case the value can be a - single variable or a tuple, and it will be combined with the key to form the validator. So you can use any of + """Several validators can be provided as a singleton, iterable, or dict-like. In that case the value can be a + single variable or a tuple, and it will be combined with the key to form the validator. So you can use any of the elements defining a validators as the key.""" # shortcut name used everywhere. Less explicit @@ -171,7 +172,7 @@ def make_validator_callable(validation_callable, # type: ValidationFunc raise ValueError( "validation function should accept 1, 2, or 3 arguments at least. 
`f(val)`, `f(obj, val)` or " "`f(obj, field, val)`") - elif nb_args == 1 or (nb_args == 0 and nbvarargs >= 1): # varargs default to one argument (compliance with old mini lambda) + elif nb_args == 1 or (nb_args == 0 and nbvarargs >= 1): # varargs default to one argument (compliance with old mini lambda) # noqa # `f(val)` def new_validation_callable(val, **ctx): return validation_callable(val) @@ -337,7 +338,7 @@ def __init__(self, convert_fun, name=None, accepts_fun=None): ConverterFunc = Union[Callable[[Any], Any], Callable[[Any, Any], Any], Callable[[Any, Any, Any], Any]] - """A converter function is a callable with signature (val), (obj, val) or (obj, field, val), returning the + """A converter function is a callable with signature (val), (obj, val) or (obj, field, val), returning the converted value in case of success""" try: @@ -364,7 +365,7 @@ def __init__(self, convert_fun, name=None, accepts_fun=None): Converters = OneOrSeveralConverterDefinitions -def make_3params_callable(f, # Union[ValidationFunc, ConverterFunc] +def make_3params_callable(f, # type: Union[ValidationFunc, ConverterFunc] is_mini_lambda=False # type: bool ): # type: (...) -> Callable[[Any, 'Field', Any], Any] @@ -372,7 +373,8 @@ def make_3params_callable(f, # Union[ValidationFunc, Converte Transforms the provided validation or conversion callable into a callable with 3 arguments (obj, field, val). :param f: - :param is_mini_lambda: a boolean indicating if the function comes from a mini lambda. In which case we know the signature has one param only (x) + :param is_mini_lambda: a boolean indicating if the function comes from a mini lambda. In which case we know the + signature has one param only (x) :return: """ # support several cases for the function signature @@ -600,7 +602,7 @@ def __str__(self): % (self.value_to_convert, err_dct_to_str(self.err_dct)) -def err_dct_to_str(err_dct # Dict[Converter, str] +def err_dct_to_str(err_dct # type: Dict[Converter, str] ): # type: (...) 
-> str msg = "" @@ -617,7 +619,7 @@ class DetailedConversionResults(object): __slots__ = 'value_to_convert', 'field', 'obj', 'err_dct', 'winning_converter', 'converted_value' def __init__(self, value_to_convert, field, obj, err_dct, winning_converter, converted_value): - self.value_to_convert= value_to_convert + self.value_to_convert = value_to_convert self.field = field self.obj = obj self.err_dct = err_dct diff --git a/pyfields/validate_n_convert.pyi b/pyfields/validate_n_convert.pyi index 004ed4d..431e814 100644 --- a/pyfields/validate_n_convert.pyi +++ b/pyfields/validate_n_convert.pyi @@ -1,7 +1,7 @@ -# Authors: Sylvain Marie +# Authors: Sylvain MARIE +# + All contributors to # -# Copyright (c) Schneider Electric Industries, 2019. All right reserved. - +# License: 3-clause BSD, from valid8 import Validator, ValidationError, ValidationFailure from valid8.base import getfullargspec as v8_getfullargspec, get_callable_name, is_mini_lambda diff --git a/pyproject.toml b/pyproject.toml index a5835a1..e9efd02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,20 @@ [build-system] requires = [ - "setuptools", + "setuptools>=39.2", "setuptools_scm", - "six", + "wheel" ] build-backend = "setuptools.build_meta" + +# pip: no ! does not work in old python 2.7 and not recommended here +# https://setuptools.readthedocs.io/en/latest/userguide/quickstart.html#basic-use + +[tool.conda] +# Declare that the following packages should be installed with conda instead of pip +# Note: this includes packages declared everywhere, here and in setup.cfg +conda_packages = [ + "setuptools", + "wheel", + "pip" +] +# pytest: not with conda ! does not work in old python 2.7 and 3.5 diff --git a/setup.cfg b/setup.cfg index 18d3404..129f310 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,24 +1,144 @@ -[egg_info] -#tag_date = 1 already covered by setuptools_scm -#tag_build = .dev --this adds ".dev" at the end of the release name. we already use setuptools_scm so already covered. 
-#tag_svn_revision = 1 --this adds "_r0" at the end of the release name. we already use setuptools_scm so already covered. +# See https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files +# And this great example : https://github.com/Kinto/kinto/blob/master/setup.cfg +[metadata] +name = pyfields +description = Define fields in python classes. Easily. +description-file = README.md +license = BSD 3-Clause +long_description = file: docs/long_description.md +long_description_content_type=text/markdown +keywords = object class boilerplate oop field attr member descriptor attribute mix-in mixin validation type-check +author = Sylvain MARIE +maintainer = Sylvain MARIE +url = https://github.com/smarie/python-pyfields +# download_url = https://github.com/smarie/python-pyfields/tarball/master >> do it in the setup.py to get the right version +classifiers = + # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: BSD License + Topic :: Software Development :: Libraries :: Python Modules + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + +[options] +# one day these will be able to come from requirement files, see https://github.com/pypa/setuptools/issues/1951. But will it be better ? 
+setup_requires = + setuptools_scm + # pytest-runner +install_requires = + valid8>=5.0 + makefun + # note: do not use double quotes in these, this triggers a weird bug in PyCharm in debug mode only + funcsigs;python_version<'3.3' + enum34;python_version<'3.4' + # 'sentinel', + packaging +tests_require = + pytest + vtypes + mini-lambda + autoclass>=2.2 + typing;python_version<'3.5' + # for some reason these pytest dependencies were not declared in old versions of pytest + six;python_version<'3.6' + attr;python_version<'3.6' + pluggy;python_version<'3.6' + +# test_suite = tests --> no need apparently +# +zip_safe = False +# explicitly setting zip_safe=False to avoid downloading `ply` see https://github.com/smarie/python-getversion/pull/5 +# and makes mypy happy see https://mypy.readthedocs.io/en/latest/installed_packages.html +packages = find: +# see [options.packages.find] below +# IMPORTANT: DO NOT set the `include_package_data` flag !! It triggers inclusion of all git-versioned files +# see https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 +# include_package_data = True +[options.packages.find] +exclude = + contrib + docs + *tests* + +[options.package_data] +* = py.typed, *.pyi + + +# Optional dependencies that can be installed with e.g. $ pip install -e .[dev,test] +# [options.extras_require] + +# -------------- Packaging ----------- +# [options.entry_points] + +# [egg_info] >> already covered by setuptools_scm [bdist_wheel] -# This flag says that the code is written to work on both Python 2 and Python -# 3. If at all possible, it is good practice to do this. If you cannot, you -# will need to generate wheels for each Python version that you support. +# Code is written to work on both Python 2 and Python 3. 
universal=1 -[metadata] -description-file = README.md - +# ------------- Others ------------- # In order to be able to execute 'python setup.py test' # from https://docs.pytest.org/en/latest/goodpractices.html#integrating-with-setuptools-python-setup-py-test-pytest-runner [aliases] -test=pytest +test = pytest +# pytest default configuration [tool:pytest] -addopts = --verbose --doctest-modules --ignore-glob='**/_*.py' -testpaths = pyfields/ -# we need this for python 2 tests to work see https://github.com/pytest-dev/pytest/issues/2917 -filterwarnings = always +testpaths = pyfields/tests/ +addopts = + --verbose + --doctest-modules + --ignore-glob='**/_*.py' + +# we need the 'always' for python 2 tests to work see https://github.com/pytest-dev/pytest/issues/2917 +filterwarnings = + always +; ignore::UserWarning + +# Coverage config +[coverage:run] +branch = True +omit = *tests* +# this is done in nox.py (github actions) or ci_tools/run_tests.sh (travis) +# source = pyfields +# command_line = -m pytest --junitxml="reports/pytest_reports/pytest.xml" --html="reports/pytest_reports/pytest.html" -v pyfields/tests/ + +[coverage:report] +fail_under = 70 +show_missing = True +exclude_lines = + # this line for all the python 2 not covered lines + except ImportError: + # we have to repeat this when exclude_lines is set + pragma: no cover + +# Done in nox.py +# [coverage:html] +# directory = site/reports/coverage_reports +# [coverage:xml] +# output = site/reports/coverage_reports/coverage.xml + +[flake8] +max-line-length = 120 +extend-ignore = D, E203 # D: Docstring errors, E203: see https://github.com/PyCQA/pycodestyle/issues/373 +copyright-check = True +copyright-regexp = ^\#\s+Authors:\s+Sylvain MARIE \n\#\s+\+\sAll\scontributors\sto\s\n\#\n\#\s+License:\s3\-clause\sBSD,\s +exclude = + .git + .github + .nox + .pytest_cache + ci_tools + docs + */tests + noxfile.py + setup.py + */_version.py diff --git a/setup.py b/setup.py index 58e52c7..3d4397a 100644 --- a/setup.py +++ 
b/setup.py @@ -1,152 +1,38 @@ -"""A setuptools based setup module. -See: -https://packaging.python.org/en/latest/distributing.html -https://github.com/pypa/sampleproject """ -from os import path +To understand this project's build structure + + - This project uses setuptools, so it is declared as the build system in the pyproject.toml file + - We use as much as possible `setup.cfg` to store the information so that it can be read by other tools such as `tox` + and `nox`. So `setup.py` contains **almost nothing** (see below) + This philosophy was found after trying all other possible combinations in other projects :) + A reference project that was inspiring to make this move : https://github.com/Kinto/kinto/blob/master/setup.cfg + +See also: + https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files + https://packaging.python.org/en/latest/distributing.html + https://github.com/pypa/sampleproject +""" +from setuptools import setup + + +# (1) check required versions (from https://medium.com/@daveshawley/safely-using-setup-cfg-for-metadata-1babbe54c108) import pkg_resources -from setuptools import setup, find_packages pkg_resources.require("setuptools>=39.2") pkg_resources.require("setuptools_scm") -from setuptools_scm import get_version # noqa: E402 -# *************** Dependencies ********* -INSTALL_REQUIRES = ['valid8>=5.0', 'makefun', - 'funcsigs;python_version<"3.3"', 'enum34;python_version<"3.4"'] # 'sentinel', -DEPENDENCY_LINKS = [] -SETUP_REQUIRES = ['pytest-runner', 'setuptools_scm'] -TESTS_REQUIRE = ['pytest', 'pytest-logging', 'mini-lambda', 'typing;python_version<"3.5"', 'vtypes', 'autoclass'] -EXTRAS_REQUIRE = {} - -# ************** ID card ***************** -DISTNAME = 'pyfields' -DESCRIPTION = 'Define fields in python classes. Easily.' 
-MAINTAINER = 'Sylvain MARIE' -MAINTAINER_EMAIL = 'sylvain.marie@se.com' -URL = 'https://github.com/smarie/python-pyfields' -DOWNLOAD_URL = URL + '/tarball/' + get_version() -LICENSE = 'BSD 3-Clause' -LICENSE_LONG = 'License :: OSI Approved :: BSD License' -KEYWORDS = 'object class boilerplate oop field attr member descriptor attribute mix-in mixin validation type-check' +# (2) Generate download url using git version +from setuptools_scm import get_version # noqa: E402 -here = path.abspath(path.dirname(__file__)) -with open(path.join(here, 'docs', 'long_description.md')) as f: - LONG_DESCRIPTION = f.read() +URL = "https://github.com/smarie/python-pyfields" +DOWNLOAD_URL = URL + "/tarball/" + get_version() -# OBSOLETES = [] +# (3) Call setup() with as little args as possible setup( - name=DISTNAME, - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - long_description_content_type='text/markdown', - - # Versions should comply with PEP440. For a discussion on single-sourcing - # the version across setup.py and the project code, see - # https://packaging.python.org/en/latest/single_source_version.html - # version=VERSION, NOW HANDLED BY GIT - - maintainer=MAINTAINER, - maintainer_email=MAINTAINER_EMAIL, - - license=LICENSE, - url=URL, download_url=DOWNLOAD_URL, - - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 5 - Production/Stable', - - # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Libraries :: Python Modules', - - # Pick your license as you wish (should match "license" above) - LICENSE_LONG, - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. 
- # 'Programming Language :: Python :: 2', - # 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - # 'Programming Language :: Python :: 3', - # 'Programming Language :: Python :: 3.3', - # 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - - # 'Framework :: Pytest' - ], - - # What does your project relate to? - keywords=KEYWORDS, - - # You can just specify the packages manually here if your project is - # simple. Or you can use find_packages(). - packages=find_packages(exclude=['contrib', 'docs', '*tests*']), - - # Alternatively, if you want to distribute just a my_module.py, uncomment - # this: - # py_modules=["my_module"], - - # List run-time dependencies here. These will be installed by pip when - # your project is installed. For an analysis of "install_requires" vs pip's - # requirements files see: - # https://packaging.python.org/en/latest/requirements.html - install_requires=INSTALL_REQUIRES, - dependency_links=DEPENDENCY_LINKS, - - # we're using git - use_scm_version={'write_to': '%s/_version.py' % DISTNAME}, # this provides the version + adds the date if local non-commited changes. - # use_scm_version={'local_scheme':'dirty-tag'}, # this provides the version + adds '+dirty' if local non-commited changes. - setup_requires=SETUP_REQUIRES, - - # test - # test_suite='nose.collector', - tests_require=TESTS_REQUIRE, - - # List additional groups of dependencies here (e.g. development - # dependencies). You can install these using the following syntax, - # for example: - # $ pip install -e .[dev,test] - extras_require=EXTRAS_REQUIRE, - - # obsoletes=OBSOLETES - - # If there are data files included in your packages that need to be - # installed, specify them here. If using Python 2.6 or less, then these - # have to be included in MANIFEST.in as well. 
- # Note: we use the empty string so that this also works with submodules - package_data={"": ['py.typed', '*.pyi']}, - # IMPORTANT: DO NOT set the `include_package_data` flag !! It triggers inclusion of all git-versioned files - # see https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 - # include_package_data=True, - - # Although 'package_data' is the preferred approach, in some case you may - # need to place data files outside of your packages. See: - # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa - # In this case, 'data_file' will be installed into '/my_data' - # data_files=[('my_data', ['data/data_file'])], - - # To provide executable scripts, use entry points in preference to the - # "scripts" keyword. Entry points provide cross-platform support and allow - # pip to create the appropriate form of executable for the target platform. - # entry_points={ - # 'console_scripts': [ - # 'sample=sample:main', - # ], - # }, - - # explicitly setting the flag to avoid `ply` being downloaded - # see https://github.com/smarie/python-getversion/pull/5 - # and to make mypy happy - # see https://mypy.readthedocs.io/en/latest/installed_packages.html - zip_safe=False, + use_scm_version={ + "write_to": "pyfields/_version.py" + }, # we can't put `use_scm_version` in setup.cfg yet unfortunately )