diff --git a/composer/dag_test_utils/CHANGELOG.md b/composer/dag_test_utils/CHANGELOG.md
new file mode 100644
index 00000000000..db9a989f4cb
--- /dev/null
+++ b/composer/dag_test_utils/CHANGELOG.md
@@ -0,0 +1,10 @@
+# CHANGELOG
+
+## 1.0.0 - May 21, 2021
+* Upgrade to Airflow 2.0.0
+* Add CHANGELOG
+* Add README to PyPI
+* Add MAINTAINING
+
+## 0.0.1 - May 20, 2021
+* Push package compatible with Airflow 1.10.15 to PyPI
\ No newline at end of file
diff --git a/composer/dag_test_utils/MAINTAINING.md b/composer/dag_test_utils/MAINTAINING.md
new file mode 100644
index 00000000000..c568d199fda
--- /dev/null
+++ b/composer/dag_test_utils/MAINTAINING.md
@@ -0,0 +1,38 @@
+# Maintenance Guide
+
+## How to release a new version
+* Make relevant code changes
+* Increment the version number in [setup.py](./setup.py) following [semver](https://semver.org/)
+* Add changes to the [CHANGELOG](./CHANGELOG.md)
+* If any usage info has changed, update the [README](./README.md)
+* [Test the distribution locally](#how-to-test-the-distribution-locally) in the [workflows directory](../workflows)
+* Open a PR and request reviews from a [Python Samples owner](https://github.com/orgs/GoogleCloudPlatform/teams/python-samples-owners) and a [Composer Codeowner](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/.github/CODEOWNERS#L24)
+* Once the PR is approved and merged, [push the new version to PyPI](#how-to-push-the-new-version-to-pypi)
+
+## How to test the distribution locally
+* In a `virtualenv`, run `pip install build`
+* Run `python -m build` to build the package. It will create a `dist/` directory containing a wheel (`.whl`) and a tarball (`.tar.gz`).
+* Change the [`requirements-test.txt`](../workflows/requirements-test.txt) file in the [`workflows`](../workflows) directory to reference your distribution by relative path, replacing `x.y.z` with your version number:
+
+```
+../dag_test_utils/dist/cloud_composer_dag_test_utils-x.y.z.tar.gz
+```
+
+* Run `nox -s py-3.8 -- quickstart_test.py` (or the entire nox session, if you prefer). If it passes without an `ImportError`, congrats! You've done it!
+
+
+## How to push the new version to PyPI
+Note: these instructions are derived from [the official guide](https://packaging.python.org/tutorials/packaging-projects/). If they seem to be out of date, please contact `cloud-dpes-composer@`.
+
+You may only do this after you have successfully tested the package locally and had your PR approved and merged into the primary branch of `python-docs-samples`.
+
+You will need access to the `cloud-dpes-composer` PyPI account and an API token. Reach out to `cloud-dpes-composer@` for access.
+
+* In a `virtualenv`, run `pip install build`
+* Run `python -m build` to build the package. It will create a `dist/` directory containing a wheel (`.whl`) and a tarball (`.tar.gz`).
+* Run `pip install twine`
+* Run `python -m twine upload --repository pypi dist/*`
+* For the username, use `__token__`. For the password, use your API token.
+* Voila! Your new version is fully released!
+
+
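For context on the local-testing step described in MAINTAINING.md above, the `nox` run in `composer/workflows` exercises DAG tests that import this package. The sketch below is illustrative only: `example_dag` is a hypothetical stand-in for a real sample DAG module, and the actual test files in the workflows directory may differ.

```python
# Illustrative sketch of the kind of test the nox run exercises.
# `example_dag` is a hypothetical sample DAG module sitting next to this test;
# the real tests in composer/workflows follow this general pattern but may differ.
import internal_unit_testing


def test_dag_import():
    import example_dag  # hypothetical sample DAG module in the same directory

    # Fails if the module defines no DAG, or if any DAG contains a task cycle.
    internal_unit_testing.assert_has_valid_dag(example_dag)
```
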
diff --git a/composer/dag_test_utils/README.md b/composer/dag_test_utils/README.md
index 710764ed93a..8e863ab48c3 100644
--- a/composer/dag_test_utils/README.md
+++ b/composer/dag_test_utils/README.md
@@ -1,6 +1,6 @@
 # Cloud Composer DAG Testing Utility
 
-This package is used internally to unit test the validity of all Cloud Composer sample DAGs. It is not supported for external production use.
+This package is used internally to unit test the validity of all Cloud Composer sample DAGs. It is not supported for external production use. The [latest release can be found on PyPI](https://pypi.org/project/cloud-composer-dag-test-utils/).
 
 ## Instructions
 
diff --git a/composer/dag_test_utils/internal_unit_testing.py b/composer/dag_test_utils/internal_unit_testing.py
index 6c8010721ba..dbd8bcb124c 100644
--- a/composer/dag_test_utils/internal_unit_testing.py
+++ b/composer/dag_test_utils/internal_unit_testing.py
@@ -16,6 +16,7 @@
 
 # [START composer_dag_unit_testing]
 from airflow import models
+from airflow.utils.dag_cycle_tester import test_cycle
 
 
 def assert_has_valid_dag(module):
@@ -26,7 +27,7 @@ def assert_has_valid_dag(module):
     for dag in vars(module).values():
         if isinstance(dag, models.DAG):
             no_dag_found = False
-            dag.test_cycle()  # Throws if a task cycle is found.
+            test_cycle(dag)  # Throws if a task cycle is found.
 
     if no_dag_found:
         raise AssertionError('module does not contain a valid DAG')
diff --git a/composer/dag_test_utils/setup.py b/composer/dag_test_utils/setup.py
index 9dfab90dca9..6e514082b29 100644
--- a/composer/dag_test_utils/setup.py
+++ b/composer/dag_test_utils/setup.py
@@ -15,13 +15,22 @@
 from setuptools import find_packages
 from setuptools import setup
 
+# read the contents of your README file
+from os import path
+this_directory = path.abspath(path.dirname(__file__))
+with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
+    long_description = f.read()
+
+
 setup(
     name="cloud_composer_dag_test_utils",
-    version="0.0.1",
+    version="1.0.0",
     url="https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/composer/dag_test_utils",
     author="Google LLC",
     description="Utility used to unit test example Apache Airflow DAGs for Google Cloud Composer. This is not an officially supported Google product.",
+    long_description=long_description,
+    long_description_content_type='text/markdown',
     packages=find_packages(),
     py_modules=['internal_unit_testing'],
-    install_requires=['apache-airflow[google]==1.10.15']
+    install_requires=['apache-airflow[google] >= 2.0.0, < 3.0.0']
 )
diff --git a/composer/workflows/requirements-test.txt b/composer/workflows/requirements-test.txt
index 55c3757968a..31e58332d69 100644
--- a/composer/workflows/requirements-test.txt
+++ b/composer/workflows/requirements-test.txt
@@ -1,2 +1,2 @@
 pytest==6.2.4
-cloud-composer-dag-test-utils==0.0.1
\ No newline at end of file
+cloud-composer-dag-test-utils==0.0.1
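
The core code change in `internal_unit_testing.py` swaps the `DAG.test_cycle()` method, which Airflow 2 removed, for the standalone `test_cycle` helper in `airflow.utils.dag_cycle_tester`, which takes the DAG as an argument. The snippet below is a minimal, hypothetical sketch of the failure mode that helper guards against; the DAG id, task ids, and dates are illustrative and not taken from the samples.

```python
import datetime

import pytest
from airflow import models
from airflow.exceptions import AirflowDagCycleException
from airflow.operators.bash import BashOperator
from airflow.utils.dag_cycle_tester import test_cycle


def test_cycle_is_detected():
    # Build a throwaway DAG whose two tasks depend on each other.
    with models.DAG(
        dag_id="cycle_demo",  # illustrative DAG id
        start_date=datetime.datetime(2021, 5, 21),
        schedule_interval=None,
    ) as dag:
        first = BashOperator(task_id="first", bash_command="echo 1")
        second = BashOperator(task_id="second", bash_command="echo 2")
        first >> second >> first  # first -> second -> first forms a cycle

    # The helper raises when it finds a task cycle, which is what
    # assert_has_valid_dag relies on to reject invalid sample DAGs.
    with pytest.raises(AirflowDagCycleException):
        test_cycle(dag)
```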