diff --git a/.travis.yml b/.travis.yml index b90b3a30c7..2241fd16e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,7 @@ sudo: required +dist: xenial # needed for more recent python 3 and python3-venv + language: generic services: @@ -7,7 +9,8 @@ services: before_install: - travis_retry sudo apt update -qq - - travis_retry sudo apt install -qq --no-install-recommends python2.7 python3 + - travis_retry sudo apt install -qq --no-install-recommends python2.7 python3 python3-venv python3-virtualenv + # (venv/virtualenv are both used by tests/test_pythonpackage.py) - sudo pip install tox>=2.0 # https://github.com/travis-ci/travis-ci/issues/6069#issuecomment-266546552 - git remote set-branches --add origin master @@ -28,7 +31,8 @@ env: before_script: # we want to fail fast on tox errors without having to `docker build` first - - tox + - tox -- tests/ --ignore tests/test_pythonpackage.py + # (we ignore test_pythonpackage.py since these run way too long!! test_pythonpackage_basic.py will still be run.) script: - docker build --tag=p4a --file Dockerfile.py3 . diff --git a/Dockerfile.py3 b/Dockerfile.py3 index 307f8ddfda..6a8286e9fc 100644 --- a/Dockerfile.py3 +++ b/Dockerfile.py3 @@ -95,7 +95,8 @@ ENV WORK_DIR="${HOME_DIR}" \ # install system dependencies RUN ${RETRY} apt -y install -qq --no-install-recommends \ - python3 virtualenv python3-pip wget lbzip2 patch sudo \ + python3 virtualenv python3-pip python3-venv \ + wget lbzip2 patch sudo \ && apt -y autoremove # build dependencies diff --git a/doc/source/distutils.rst b/doc/source/distutils.rst index 17a4070071..2ee999ffef 100644 --- a/doc/source/distutils.rst +++ b/doc/source/distutils.rst @@ -2,8 +2,71 @@ distutils/setuptools integration ================================ -Instead of running p4a via the command line, you can integrate with -distutils and setup.py. 
+Have `p4a apk` run setup.py (replaces ``--requirements``) +--------------------------------------------------------- + +If your project has a `setup.py` file, then it can be executed by +`p4a` when your app is packaged such that your app properly ends up +in the packaged site-packages. (Use ``--use-setup-py`` to enable this, +``--ignore-setup-py`` to prevent it.) + +This is functionality to run **setup.py INSIDE `p4a apk`,** as opposed +to the other section below, which is about running +*p4a inside setup.py*. + +This however has these caveats: + +- **Only your ``main.py`` from your app's ``--private`` data is copied + into the .apk!** Everything else needs to be installed by your + ``setup.py`` into the site-packages, or it won't be packaged. + +- All dependencies that map to recipes can only be pinned to exact + versions, all other constraints will either just plain not work + or even cause build errors. (Sorry, our internal processing is + just not smart enough to honor them properly at this point) + +- If you don't use Python 3 per default, you still need to specify + ``--requirements python2`` (without any additional dependencies) + +- The dependency analysis at the start may be quite slow and delay + your build + +Reasons why you would want to use a `setup.py` to be processed (and +omit specifying ``--requirements``): + +- You want to use a more standard mechanism to specify dependencies + instead of ``--requirements`` + +- You already use a `setup.py` for other platforms + +- Your application imports itself + in a way that won't work unless installed to site-packages + + +Reasons **not** to use a `setup.py` (that is to use the usual +``--requirements`` mechanism instead): + +- You don't use a `setup.py` yet, and prefer the simplicity of + just specifying ``--requirements`` + +- Your `setup.py` assumes a desktop platform and pulls in + Android-incompatible dependencies, and you are not willing + to change this, or you want to keep it separate from Android + 
deployment for other organizational reasons + +- You need data files to be around that aren't installed by + your `setup.py` into the site-packages folder + + +Use your setup.py to call p4a +----------------------------- + +Instead of running p4a via the command line, you can call it via +`setup.py` instead, by it integrating with distutils and setup.py. + +This is functionality to run **p4a INSIDE setup.py,** as opposed +to the other section above, which is about running +*setup.py inside `p4a apk`*. The base command is:: @@ -44,7 +107,7 @@ All of these automatic arguments can be overridden by passing them manually on t python setup.py apk --name="Testapp Setup" --version=2.5 Adding p4a arguments in setup.py --------------------------------- +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Instead of providing extra arguments on the command line, you can store them in setup.py by passing the ``options`` parameter to @@ -79,7 +142,7 @@ setup.py apk``. Any options passed on the command line will override these values. Adding p4a arguments in setup.cfg ---------------------------------- +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can also provide p4a arguments in the setup.cfg file, as normal for distutils. The syntax is:: diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst index 967d6ed755..6fc7ffc469 100644 --- a/doc/source/quickstart.rst +++ b/doc/source/quickstart.rst @@ -298,6 +298,14 @@ your own Kivy branch you might set:: The specified directory will be copied into python-for-android instead of downloading from the normal url specified in the recipe. +setup.py file (experimental) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If your application is also packaged for desktop using `setup.py`, +you may want to use your `setup.py` instead of the +``--requirements`` option to avoid specifying things twice. 
+For that purpose, check out :doc:`distutils` + Going further ~~~~~~~~~~~~~ diff --git a/pythonforandroid/bootstraps/common/build/build.py b/pythonforandroid/bootstraps/common/build/build.py index 38fe2b7744..61dde31327 100644 --- a/pythonforandroid/bootstraps/common/build/build.py +++ b/pythonforandroid/bootstraps/common/build/build.py @@ -23,12 +23,14 @@ import jinja2 -def get_dist_info_for(key): +def get_dist_info_for(key, error_if_missing=True): try: with open(join(dirname(__file__), 'dist_info.json'), 'r') as fileh: info = json.load(fileh) - value = str(info[key]) + value = info[key] except (OSError, KeyError) as e: + if not error_if_missing: + return None print("BUILD FAILURE: Couldn't extract the key `" + key + "` " + "from dist_info.json: " + str(e)) sys.exit(1) @@ -304,18 +306,45 @@ def make_package(args): f.write("P4A_MINSDK=" + str(args.min_sdk_version) + "\n") # Package up the private data (public not supported). + use_setup_py = get_dist_info_for("use_setup_py", + error_if_missing=False) is True tar_dirs = [env_vars_tarpath] - if args.private: - tar_dirs.append(args.private) - for python_bundle_dir in ('private', 'crystax_python', '_python_bundle'): - if exists(python_bundle_dir): - tar_dirs.append(python_bundle_dir) - if get_bootstrap_name() == "webview": - tar_dirs.append('webview_includes') - if args.private or args.launcher: - make_tar( - join(assets_dir, 'private.mp3'), tar_dirs, args.ignore_path, - optimize_python=args.optimize_python) + _temp_dirs_to_clean = [] + try: + if args.private: + if not use_setup_py or ( + not exists(join(args.private, "setup.py")) and + not exists(join(args.private, "pyproject.toml")) + ): + print('No setup.py/pyproject.toml used, copying ' + 'full private data into .apk.') + tar_dirs.append(args.private) + else: + print('Copying main.py ONLY, since other app data is ' + 'expected in site-packages.') + main_py_only_dir = tempfile.mkdtemp() + _temp_dirs_to_clean.append(main_py_only_dir) + if exists(join(args.private, 
"main.pyo")): + shutil.copyfile(join(args.private, "main.pyo"), + join(main_py_only_dir, "main.pyo")) + elif exists(join(args.private, "main.py")): + shutil.copyfile(join(args.private, "main.py"), + join(main_py_only_dir, "main.py")) + tar_dirs.append(main_py_only_dir) + for python_bundle_dir in ('private', + 'crystax_python', + '_python_bundle'): + if exists(python_bundle_dir): + tar_dirs.append(python_bundle_dir) + if get_bootstrap_name() == "webview": + tar_dirs.append('webview_includes') + if args.private or args.launcher: + make_tar( + join(assets_dir, 'private.mp3'), tar_dirs, args.ignore_path, + optimize_python=args.optimize_python) + finally: + for directory in _temp_dirs_to_clean: + shutil.rmtree(directory) # Remove extra env vars tar-able directory: shutil.rmtree(env_vars_tarpath) @@ -361,9 +390,7 @@ def make_package(args): version_code = 0 if not args.numeric_version: # Set version code in format (arch-minsdk-app_version) - with open(join(dirname(__file__), 'dist_info.json'), 'r') as dist_info: - dist_data = json.load(dist_info) - arch = dist_data["archs"][0] + arch = get_dist_info_for("archs")[0] arch_dict = {"x86_64": "9", "arm64-v8a": "8", "armeabi-v7a": "7", "x86": "6"} arch_code = arch_dict.get(arch, '1') min_sdk = args.min_sdk_version diff --git a/pythonforandroid/build.py b/pythonforandroid/build.py index 734103b20c..7bce14c937 100644 --- a/pythonforandroid/build.py +++ b/pythonforandroid/build.py @@ -9,6 +9,7 @@ import sys import re import sh +import shutil import subprocess from pythonforandroid.util import ( @@ -17,6 +18,7 @@ ) from pythonforandroid.logger import (info, warning, info_notify, info_main, shprint) from pythonforandroid.archs import ArchARM, ArchARMv7_a, ArchAarch_64, Archx86, Archx86_64 +from pythonforandroid.pythonpackage import get_package_name from pythonforandroid.recipe import CythonRecipe, Recipe from pythonforandroid.recommendations import ( check_ndk_version, check_target_api, check_ndk_api, @@ -43,6 +45,9 @@ class 
Context(object): libs_dir = None aars_dir = None + # Whether setup.py or similar should be used if present: + use_setup_py = False + ccache = None # whether to use ccache cython = None # the cython interpreter name @@ -486,21 +491,10 @@ def has_package(self, name, arch=None): if not os.path.exists(name): # Non-existing dir, cannot look this up. return False - if os.path.exists(os.path.join(name, "setup.py")): - # Get name from setup.py: - name = subprocess.check_output([ - sys.executable, "setup.py", "--name"], - cwd=name) - try: - name = name.decode('utf-8', 'replace') - except AttributeError: - pass - name = name.strip() - if len(name) == 0: - # Failed to look up any meaningful name. - return False - else: - # A folder with whatever, cannot look this up. + try: + name = get_package_name(os.path.abspath(name)) + except ValueError: + # Failed to look up any meaningful name. return False # Try to look up recipe by name: @@ -523,7 +517,9 @@ def not_has_package(self, name, arch=None): return not self.has_package(name, arch) -def build_recipes(build_order, python_modules, ctx): +def build_recipes(build_order, python_modules, ctx, project_dir, + ignore_project_setup_py=False + ): # Put recipes in correct build order info_notify("Recipe build order is {}".format(build_order)) if python_modules: @@ -578,22 +574,56 @@ def build_recipes(build_order, python_modules, ctx): recipe.postbuild_arch(arch) info_main('# Installing pure Python modules') - run_pymodules_install(ctx, python_modules) + run_pymodules_install( + ctx, python_modules, project_dir, + ignore_setup_py=ignore_project_setup_py + ) return -def run_pymodules_install(ctx, modules): +def project_has_setup_py(project_dir): + if project_dir is not None and \ + (os.path.exists(os.path.join(project_dir, + "setup.py")) or + os.path.exists(os.path.join(project_dir, + "pyproject.toml")) + ): + return True + return False + + +def run_pymodules_install(ctx, modules, project_dir, ignore_setup_py=False): + """ This function 
will take care of all non-recipe things, by: + + 1. Processing them from --requirements (the modules argument) + and installing them + + 2. Installing the user project/app itself via setup.py unless + ignore_setup_py is set + + """ + + info('*** PYTHON PACKAGE / PROJECT INSTALL STAGE ***') modules = list(filter(ctx.not_has_package, modules)) - if not modules: - info('There are no Python modules to install, skipping') + # Bail out if no python deps and no setup.py to process: + if not modules and ( + ignore_setup_py or + not project_has_setup_py(project_dir) + ): + info('No Python modules and no setup.py to process, skipping') return - info('The requirements ({}) don\'t have recipes, attempting to install ' - 'them with pip'.format(', '.join(modules))) - info('If this fails, it may mean that the module has compiled ' - 'components and needs a recipe.') + # Output messages about what we're going to do: + if modules: + info('The requirements ({}) don\'t have recipes, attempting to ' + 'install them with pip'.format(', '.join(modules))) + info('If this fails, it may mean that the module has compiled ' + 'components and needs a recipe.') + if project_has_setup_py(project_dir) and not ignore_setup_py: + info('Will process project install, if it fails then the ' + 'project may not be compatible for Android install.') venv = sh.Command(ctx.virtualenv) with current_directory(join(ctx.build_dir)): @@ -605,16 +635,6 @@ def run_pymodules_install(ctx, modules): 'venv' ) - info('Creating a requirements.txt file for the Python modules') - with open('requirements.txt', 'w') as fileh: - for module in modules: - key = 'VERSION_' + module - if key in environ: - line = '{}=={}\n'.format(module, environ[key]) - else: - line = '{}\n'.format(module) - fileh.write(line) - # Prepare base environment and upgrade pip: base_env = copy.copy(os.environ) base_env["PYTHONPATH"] = ctx.get_site_packages_dir() @@ -638,13 +658,6 @@ def run_pymodules_install(ctx, modules): env = copy.copy(base_env) 
env.update(recipe_env) - info('Installing Python modules with pip') - info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. ' - 'A reason for this is often modules compiling ' - 'native code that is unaware of Android cross-compilation ' - 'and does not work without additional ' - 'changes / workarounds.') - # Make sure our build package dir is available, and the virtualenv # site packages come FIRST (so the proper pip version is used): env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir() @@ -653,12 +666,140 @@ def run_pymodules_install(ctx, modules): "python" + ctx.python_recipe.major_minor_version_string, "site-packages")) + ":" + env["PYTHONPATH"] - # Do actual install: - shprint(sh.bash, '-c', ( - "venv/bin/pip " + - "install -v --target '{0}' --no-deps -r requirements.txt" - ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")), - _env=copy.copy(env)) + # Install the manually specified requirements first: + if not modules: + info('There are no Python modules to install, skipping') + else: + info('Creating a requirements.txt file for the Python modules') + with open('requirements.txt', 'w') as fileh: + for module in modules: + key = 'VERSION_' + module + if key in environ: + line = '{}=={}\n'.format(module, environ[key]) + else: + line = '{}\n'.format(module) + fileh.write(line) + + info('Installing Python modules with pip') + info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. ' + 'A reason for this is often modules compiling ' + 'native code that is unaware of Android cross-compilation ' + 'and does not work without additional ' + 'changes / workarounds.') + + shprint(sh.bash, '-c', ( + "venv/bin/pip " + + "install -v --target '{0}' --no-deps -r requirements.txt" + ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")), + _env=copy.copy(env)) + + # Afterwards, run setup.py if present: + if project_has_setup_py(project_dir) and not ignore_setup_py: + with current_directory(project_dir): + info('got setup.py or similar, running project install. 
' + + '(disable this behavior with --ignore-setup-py)') + + # Compute & output the constraints we will use: + info('Contents that will be used for constraints.txt:') + constraints = subprocess.check_output([ + join( + ctx.build_dir, "venv", "bin", "pip" + ), + "freeze" + ], env=copy.copy(env)) + try: + constraints = constraints.decode("utf-8", "replace") + except AttributeError: + pass + info(constraints) + + # Make sure all packages found are fixed in version + # by writing a constraint file, to avoid recipes being + # upgraded & reinstalled: + with open('constraints.txt', 'wb') as fileh: + fileh.write(constraints.encode("utf-8", "replace")) + + info('Populating venv\'s site-packages with ' + 'ctx.get_site_packages_dir()...') + + # Copy dist contents into site-packages for discovery. + # Why this is needed: + # --target is somewhat evil and messes with discovery of + # packages in PYTHONPATH if that also includes the target + # folder. So we need to use the regular virtualenv + # site-packages folder instead. 
+ # Reference: + # https://github.com/pypa/pip/issues/6223 + ctx_site_packages_dir = os.path.normpath( + os.path.abspath(ctx.get_site_packages_dir()) + ) + venv_site_packages_dir = os.path.normpath(os.path.join( + ctx.build_dir, "venv", "lib", [ + f for f in os.listdir(os.path.join( + ctx.build_dir, "venv", "lib" + )) if f.startswith("python") + ][0], "site-packages" + )) + copied_over_contents = [] + for f in os.listdir(ctx_site_packages_dir): + full_path = os.path.join(ctx_site_packages_dir, f) + if not os.path.exists(os.path.join( + venv_site_packages_dir, f + )): + if os.path.isdir(full_path): + shutil.copytree(full_path, os.path.join( + venv_site_packages_dir, f + )) + else: + shutil.copy2(full_path, os.path.join( + venv_site_packages_dir, f + )) + copied_over_contents.append(f) + + # Get listing of virtualenv's site-packages, to see the + # newly added things afterwards & copy them back into + # the distribution folder / build context site-packages: + previous_venv_contents = os.listdir(venv_site_packages_dir) + + # Actually run setup.py: + info('Launching package install...') + shprint(sh.bash, '-c', ( + "'" + join( + ctx.build_dir, "venv", "bin", "pip" + ).replace("'", "'\"'\"'") + "' " + + "install -c constraints.txt -v ." 
+ ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")), + _env=copy.copy(env)) + + # Go over all new additions and copy them back: + info('Copying additions resulting from setup.py back ' + + 'into ctx.get_site_packages_dir()...') + new_venv_additions = [] + for f in (set(os.listdir(venv_site_packages_dir)) - + set(previous_venv_contents)): + new_venv_additions.append(f) + full_path = os.path.join(venv_site_packages_dir, f) + if os.path.isdir(full_path): + shutil.copytree(full_path, os.path.join( + ctx_site_packages_dir, f + )) + else: + shutil.copy2(full_path, os.path.join( + ctx_site_packages_dir, f + )) + + # Undo all the changes we did to the venv-site packages: + info('Reverting additions to virtualenv\'s site-packages...') + for f in set(copied_over_contents + new_venv_additions): + full_path = os.path.join(venv_site_packages_dir, f) + if os.path.isdir(full_path): + shutil.rmtree(full_path) + else: + os.remove(full_path) + elif not ignore_setup_py: + info("No setup.py found in project directory: " + + str(project_dir) + ) # Strip object files after potential Cython or native code builds: standard_recipe.strip_object_files(ctx.archs[0], env, diff --git a/pythonforandroid/distribution.py b/pythonforandroid/distribution.py index 9fa7b4c6b2..f088ac01e6 100644 --- a/pythonforandroid/distribution.py +++ b/pythonforandroid/distribution.py @@ -214,6 +214,7 @@ def save_info(self, dirn): 'bootstrap': self.ctx.bootstrap.name, 'archs': [arch.arch for arch in self.ctx.archs], 'ndk_api': self.ctx.ndk_api, + 'use_setup_py': self.ctx.use_setup_py, 'recipes': self.ctx.recipe_build_order + self.ctx.python_modules, 'hostpython': self.ctx.hostpython, 'python_version': self.ctx.python_recipe.major_minor_version_string}, diff --git a/pythonforandroid/pythonpackage.py b/pythonforandroid/pythonpackage.py new file mode 100644 index 0000000000..8b2d552602 --- /dev/null +++ b/pythonforandroid/pythonpackage.py @@ -0,0 +1,693 @@ +""" This module offers highlevel functions to get 
package metadata + like the METADATA file, the name, or a list of dependencies. + + Usage examples: + + # Getting package name from pip reference: + from pythonforandroid.pythonpackage import get_package_name + print(get_package_name("pillow")) + # Outputs: "Pillow" (note the spelling!) + + # Getting package dependencies: + from pythonforandroid.pythonpackage import get_package_dependencies + print(get_package_dependencies("pep517")) + # Outputs: "['pytoml']" + + # Get package name from arbitrary package source: + from pythonforandroid.pythonpackage import get_package_name + print(get_package_name("/some/local/project/folder/")) + # Outputs package name + + NOTE: + + Yes, this module doesn't fit well into python-for-android, but this + functionality isn't available ANYWHERE ELSE, and upstream (pip, ...) + currently has no interest in taking this over, so it has no other place + to go. + (Unless someone reading this puts it into yet another packaging lib) + + Reference discussion/upstream inclusion attempt: + + https://github.com/pypa/packaging-problems/issues/247 + +""" + + +from io import open # needed for python 2 +import os +from pep517.envbuild import BuildEnvironment +from pep517.wrappers import Pep517HookCaller +import pytoml +import shutil +import subprocess +import sys +import tarfile +import tempfile +import textwrap +import time +try: + from urllib.parse import urlparse + from urllib.parse import unquote as urlunquote +except ImportError: # Python 2... + from urlparse import urlparse + from urlparse import unquote as urlunquote +import zipfile + + +def transform_dep_for_pip(dependency): + if dependency.find("@") > 0: + # WORKAROUND FOR UPSTREAM BUG: + # https://github.com/pypa/pip/issues/6097 + # (Please REMOVE workaround once that is fixed & released upstream!) + # + # Basically, setup_requires() can contain a format pip won't install + # from a requirements.txt (PEP 508 URLs). 
+ # To avoid this, translate to an #egg-name= reference: + url = (dependency.partition("@")[2].strip() + + "#egg-name=" + + dependency.partition("@")[0].strip() + ) + return url + return dependency + + +def extract_metainfo_files_from_package( + package, + output_folder, + debug=False + ): + """ Extracts metadata files from the given package to the given folder, + which may be referenced in any way that is permitted in + a requirements.txt file or install_requires=[] listing. + + Current supported metadata files that will be extracted: + + - pyproject.toml (only if package wasn't obtained as wheel) + - METADATA + """ + + if package is None: + raise ValueError("package cannot be None") + + if not os.path.exists(output_folder) or os.path.isfile(output_folder): + raise ValueError("output folder needs to be existing folder") + + # A temp folder for making a package copy in case it's a local folder, + # because extracting metadata might modify files + # (creating sdists/wheels...) + temp_folder = tempfile.mkdtemp(prefix="pythonpackage-package-copy-") + try: + # Package is indeed a folder! Get a temp copy to work on: + if is_filesystem_path(package): + shutil.copytree( + parse_as_folder_reference(package), + os.path.join(temp_folder, "package") + ) + package = os.path.join(temp_folder, "package") + + # Because PEP517 can be noisy and contextlib.redirect_* fails to + # contain it, we will run the actual analysis in a separate process: + try: + subprocess.check_output([ + sys.executable, + "-c", + "import importlib\n" + "import json\n" + "import os\n" + "import sys\n" + "sys.path = [os.path.dirname(sys.argv[3])] + sys.path\n" + "m = importlib.import_module(\n" + " os.path.basename(sys.argv[3]).partition('.')[0]\n" + ")\n" + "m._extract_metainfo_files_from_package_unsafe(" + " sys.argv[1]," + " sys.argv[2]," + ")", + package, output_folder, os.path.abspath(__file__)], + stderr=subprocess.STDOUT, # make sure stderr is muted. 
+ cwd=os.path.join(os.path.dirname(__file__), "..") + ) + except subprocess.CalledProcessError as e: + output = e.output.decode("utf-8", "replace") + if debug: + print("Got error obtaining meta info.") + print("Detail output:") + print(output) + print("End of Detail output.") + raise ValueError( + "failed to obtain meta info - " + "is '{}' a valid package? " + "Detailed output:\n{}".format(package, output) + ) + finally: + shutil.rmtree(temp_folder) + + +def _get_system_python_executable(): + """ Returns the path the system-wide python binary. + (In case we're running in a virtualenv or venv) + """ + # This function is required by get_package_as_folder() to work + # inside a virtualenv, since venv creation will fail with + # the virtualenv's local python binary. + # (venv/virtualenv incompatibility) + + # Abort if not in virtualenv or venv: + if not hasattr(sys, "real_prefix") and ( + not hasattr(sys, "base_prefix") or + os.path.normpath(sys.base_prefix) == + os.path.normpath(sys.prefix)): + return sys.executable + + # Extract prefix we need to look in: + if hasattr(sys, "real_prefix"): + search_prefix = sys.real_prefix # virtualenv + else: + search_prefix = sys.base_prefix # venv + + def python_binary_from_folder(path): + def binary_is_usable(python_bin): + try: + filenotfounderror = FileNotFoundError + except NameError: # Python 2 + filenotfounderror = OSError + try: + subprocess.check_output([ + os.path.join(path, python_bin), "--version" + ], stderr=subprocess.STDOUT) + return True + except (subprocess.CalledProcessError, filenotfounderror): + return False + + python_name = "python" + sys.version + while (not binary_is_usable(python_name) and + python_name.find(".") > 0): + # Try less specific binary name: + python_name = python_name.rpartition(".")[0] + if binary_is_usable(python_name): + return os.path.join(path, python_name) + return None + + # Return from sys.real_prefix if present: + result = python_binary_from_folder(search_prefix) + if result is not 
None: + return result + + # Check out all paths in $PATH: + bad_candidates = [] + good_candidates = [] + ever_had_nonvenv_path = False + for p in os.environ.get("PATH", "").split(":"): + # Skip if not possibly the real system python: + if not os.path.normpath(p).startswith( + os.path.normpath(search_prefix) + ): + continue + + # First folders might be virtualenv/venv we want to avoid: + if not ever_had_nonvenv_path: + sep = os.path.sep + if ("system32" not in p.lower() and "usr" not in p) or \ + {"home", ".tox"}.intersection(set(p.split(sep))) or \ + "users" in p.lower(): + # Doesn't look like bog-standard system path. + if (p.endswith(os.path.sep + "bin") or + p.endswith(os.path.sep + "bin" + os.path.sep)): + # Also ends in "bin" -> likely virtualenv/venv. + # Add as unfavorable / end of candidates: + bad_candidates.append(p) + continue + ever_had_nonvenv_path = True + + good_candidates.append(p) + + # See if we can now actually find the system python: + for p in good_candidates + bad_candidates: + result = python_binary_from_folder(p) + if result is not None: + return result + + raise RuntimeError("failed to locate system python in: " + + sys.real_prefix) + + +def get_package_as_folder(dependency): + """ This function downloads the given package / dependency and extracts + the raw contents into a folder. + + Afterwards, it returns a tuple with the type of distribution obtained, + and the temporary folder it extracted to. It is the caller's + responsibility to delete the returned temp folder after use. + + Examples of returned values: + + ("source", "/tmp/pythonpackage-venv-e84toiwjw") + ("wheel", "/tmp/pythonpackage-venv-85u78uj") + + What the distribution type will be depends on what pip decides to + download. + """ + + venv_parent = tempfile.mkdtemp( + prefix="pythonpackage-venv-" + ) + try: + # Create a venv to install into: + try: + if int(sys.version.partition(".")[0]) < 3: + # Python 2.x has no venv. 
+ subprocess.check_output([ + sys.executable, # no venv conflict possible, + # -> no need to use system python + "-m", "virtualenv", + "--python=" + _get_system_python_executable(), + os.path.join(venv_parent, 'venv') + ], cwd=venv_parent) + else: + # On modern Python 3, use venv. + subprocess.check_output([ + _get_system_python_executable(), "-m", "venv", + os.path.join(venv_parent, 'venv') + ], cwd=venv_parent) + except subprocess.CalledProcessError as e: + output = e.output.decode('utf-8', 'replace') + raise ValueError( + 'venv creation unexpectedly ' + + 'failed. error output: ' + str(output) + ) + venv_path = os.path.join(venv_parent, "venv") + + # Update pip and wheel in venv for latest feature support: + try: + filenotfounderror = FileNotFoundError + except NameError: # Python 2. + filenotfounderror = OSError + try: + subprocess.check_output([ + os.path.join(venv_path, "bin", "pip"), + "install", "-U", "pip", "wheel", + ]) + except filenotfounderror: + raise RuntimeError( + "venv appears to be missing pip. " + "did we fail to use a proper system python??\n" + "system python path detected: {}\n" + "os.environ['PATH']: {}".format( + _get_system_python_executable(), + os.environ.get("PATH", "") + ) + ) + + # Create download subfolder: + os.mkdir(os.path.join(venv_path, "download")) + + # Write a requirements.txt with our package and download: + with open(os.path.join(venv_path, "requirements.txt"), + "w", encoding="utf-8" + ) as f: + def to_unicode(s): # Needed for Python 2. 
+ try: + return s.decode("utf-8") + except AttributeError: + return s + f.write(to_unicode(transform_dep_for_pip(dependency))) + try: + subprocess.check_output( + [ + os.path.join(venv_path, "bin", "pip"), + "download", "--no-deps", "-r", "../requirements.txt", + "-d", os.path.join(venv_path, "download") + ], + stderr=subprocess.STDOUT, + cwd=os.path.join(venv_path, "download") + ) + except subprocess.CalledProcessError as e: + raise RuntimeError("package download failed: " + str(e.output)) + + if len(os.listdir(os.path.join(venv_path, "download"))) == 0: + # No download. This can happen if the dependency has a condition + # which prohibits install in our environment. + # (the "package ; ... conditional ... " type of condition) + return (None, None) + + # Get the result and make sure it's an extracted directory: + result_folder_or_file = os.path.join( + venv_path, "download", + os.listdir(os.path.join(venv_path, "download"))[0] + ) + dl_type = "source" + if not os.path.isdir(result_folder_or_file): + # Must be an archive. + if result_folder_or_file.endswith((".zip", ".whl")): + if result_folder_or_file.endswith(".whl"): + dl_type = "wheel" + with zipfile.ZipFile(result_folder_or_file) as f: + f.extractall(os.path.join(venv_path, + "download", "extracted" + )) + result_folder_or_file = os.path.join( + venv_path, "download", "extracted" + ) + elif result_folder_or_file.find(".tar.") > 0: + # Probably a tarball. 
+ with tarfile.open(result_folder_or_file) as f: + f.extractall(os.path.join(venv_path, + "download", "extracted" + )) + result_folder_or_file = os.path.join( + venv_path, "download", "extracted" + ) + else: + raise RuntimeError( + "unknown archive or download " + + "type: " + str(result_folder_or_file) + ) + + # If the result is hidden away in an additional subfolder, + # descend into it: + while os.path.isdir(result_folder_or_file) and \ + len(os.listdir(result_folder_or_file)) == 1 and \ + os.path.isdir(os.path.join( + result_folder_or_file, + os.listdir(result_folder_or_file)[0] + )): + result_folder_or_file = os.path.join( + result_folder_or_file, + os.listdir(result_folder_or_file)[0] + ) + + # Copy result to new dedicated folder so we can throw away + # our entire virtualenv nonsense after returning: + result_path = tempfile.mkdtemp() + shutil.rmtree(result_path) + shutil.copytree(result_folder_or_file, result_path) + return (dl_type, result_path) + finally: + shutil.rmtree(venv_parent) + + +def _extract_metainfo_files_from_package_unsafe( + package, + output_path + ): + # This is the unwrapped function that will + # 1. make lots of stdout/stderr noise + # 2. possibly modify files (if the package source is a local folder) + # Use extract_metainfo_files_from_package_folder instead which avoids + # these issues. + + clean_up_path = False + path_type = "source" + path = parse_as_folder_reference(package) + if path is None: + # This is not a path. Download it: + (path_type, path) = get_package_as_folder(package) + if path_type is None: + # Download failed. + raise ValueError( + "cannot get info for this package, " + + "pip says it has no downloads (conditional dependency?)" + ) + clean_up_path = True + + try: + build_requires = [] + metadata_path = None + if path_type != "wheel": + # We need to process this first to get the metadata. 
+ + # Ensure pyproject.toml is available (pep517 expects it) + if not os.path.exists(os.path.join(path, "pyproject.toml")): + with open(os.path.join(path, "pyproject.toml"), "w") as f: + f.write(textwrap.dedent(u"""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """)) + + # Copy the pyproject.toml: + shutil.copyfile( + os.path.join(path, 'pyproject.toml'), + os.path.join(output_path, 'pyproject.toml') + ) + + # Get build backend from pyproject.toml: + with open(os.path.join(path, 'pyproject.toml')) as f: + build_sys = pytoml.load(f)['build-system'] + backend = build_sys["build-backend"] + + # Get a virtualenv with build requirements and get all metadata: + env = BuildEnvironment() + metadata = None + with env: + hooks = Pep517HookCaller(path, backend) + env.pip_install([transform_dep_for_pip(req) for req in build_requires]) + reqs = hooks.get_requires_for_build_wheel({}) + env.pip_install([transform_dep_for_pip(req) for req in reqs]) + try: + metadata = hooks.prepare_metadata_for_build_wheel(path) + except Exception: # sadly, pep517 has no good error here + pass + if metadata is not None: + metadata_path = os.path.join( + path, metadata, "METADATA" + ) + else: + # This is a wheel, so metadata should be in *.dist-info folder: + metadata_path = os.path.join( + path, + [f for f in os.listdir(path) if f.endswith(".dist-info")][0], + "METADATA" + ) + + # Copy the metadata file: + shutil.copyfile(metadata_path, os.path.join(output_path, "METADATA")) + finally: + if clean_up_path: + shutil.rmtree(path) + + +def is_filesystem_path(dep): + """ Convenience function around parse_as_folder_reference() to + check if a dependency refers to a folder path or something remote. + + Returns True if local, False if remote. + """ + return (parse_as_folder_reference(dep) is not None) + + +def parse_as_folder_reference(dep): + """ See if a dependency reference refers to a folder path. 
+ If it does, return the folder path (which parses and + resolves file:// urls in the process). + If it doesn't, return None. + """ + # Special case: pep508 urls + if dep.find("@") > 0 and ( + (dep.find("@") < dep.find("/") or "/" not in dep) and + (dep.find("@") < dep.find(":") or ":" not in dep) + ): + # This should be a 'pkgname @ https://...' style path, or + # 'pkname @ /local/file/path'. + return parse_as_folder_reference(dep.partition("@")[2].lstrip()) + + # Check if this is either not an url, or a file URL: + if dep.startswith(("/", "file://")) or ( + dep.find("/") > 0 and + dep.find("://") < 0): + if dep.startswith("file://"): + dep = urlunquote(urlparse(dep).path) + return dep + return None + + +def _extract_info_from_package(dependency, + extract_type=None, + debug=False, + include_build_requirements=False + ): + """ Internal function to extract metainfo from a package. + Currently supported info types: + + - name + - dependencies (a list of dependencies) + """ + output_folder = tempfile.mkdtemp(prefix="pythonpackage-metafolder-") + try: + extract_metainfo_files_from_package( + dependency, output_folder, debug=debug + ) + + with open(os.path.join(output_folder, "METADATA"), + "r", encoding="utf-8" + ) as f: + # Get metadata and cut away description (is after 2 linebreaks) + metadata_entries = f.read().partition("\n\n")[0].splitlines() + + if extract_type == "name": + name = None + for meta_entry in metadata_entries: + if meta_entry.lower().startswith("name:"): + return meta_entry.partition(":")[2].strip() + if name is None: + raise ValueError("failed to obtain package name") + return name + elif extract_type == "dependencies": + requirements = [] + if os.path.exists(os.path.join(output_folder, + 'pyproject.toml') + ) and include_build_requirements: + with open(os.path.join(output_folder, 'pyproject.toml')) as f: + build_sys = pytoml.load(f)['build-system'] + if "requires" in build_sys: + requirements += build_sys["requires"] + + # Add requirements from 
metadata:
+        requirements += [
+            entry.rpartition("Requires-Dist:")[2].strip()
+            for entry in metadata_entries
+            if entry.startswith("Requires-Dist")
+        ]
+
+        return list(set(requirements))  # remove duplicates
+    finally:
+        shutil.rmtree(output_folder)
+
+
+package_name_cache = dict()
+
+
+def get_package_name(dependency,
+                     use_cache=True):
+    def timestamp():
+        try:
+            return time.monotonic()
+        except AttributeError:
+            return time.time()  # Python 2.
+    try:
+        value = package_name_cache[dependency]
+        if value[0] + 600.0 > timestamp() and use_cache:
+            return value[1]
+    except KeyError:
+        pass
+    result = _extract_info_from_package(dependency, extract_type="name")
+    package_name_cache[dependency] = (timestamp(), result)
+    return result
+
+
+def get_package_dependencies(package,
+                             recursive=False,
+                             verbose=False,
+                             include_build_requirements=False):
+    """ Obtain the dependencies from a package. Please note this
+        function is possibly SLOW, especially if you enable
+        the recursive mode.
+    """
+    packages_processed = set()
+    package_queue = [package]
+    reqs = set()
+    reqs_as_names = set()
+    while len(package_queue) > 0:
+        current_queue = package_queue
+        package_queue = []
+        for package_dep in current_queue:
+            new_reqs = set()
+            if verbose:
+                print("get_package_dependencies: resolving dependency "
+                      "to package name: {}".format(package_dep))
+            package = get_package_name(package_dep)
+            if package.lower() in packages_processed:
+                continue
+            if verbose:
+                print("get_package_dependencies: "
+                      "processing package: {}".format(package))
+                print("get_package_dependencies: "
+                      "Packages seen so far: {}".format(
+                          packages_processed
+                      ))
+            packages_processed.add(package.lower())
+
+            # Use our regular folder processing to examine:
+            new_reqs = new_reqs.union(_extract_info_from_package(
+                package_dep, extract_type="dependencies",
+                debug=verbose,
+                include_build_requirements=include_build_requirements,
+            ))
+
+            # Process new requirements:
+            if verbose:
+                print('get_package_dependencies: 
collected ' + "deps of '{}': {}".format( + package_dep, str(new_reqs), + )) + for new_req in new_reqs: + try: + req_name = get_package_name(new_req) + except ValueError as e: + if new_req.find(";") >= 0: + # Conditional dep where condition isn't met? + # --> ignore it + continue + if verbose: + print("get_package_dependencies: " + + "unexpected failure to get name " + + "of '" + str(new_req) + "': " + + str(e)) + raise RuntimeError( + "failed to get " + + "name of dependency: " + str(e) + ) + if req_name.lower() in reqs_as_names: + continue + if req_name.lower() not in packages_processed: + package_queue.append(new_req) + reqs.add(new_req) + reqs_as_names.add(req_name.lower()) + + # Bail out here if we're not scanning recursively: + if not recursive: + package_queue[:] = [] # wipe queue + break + if verbose: + print("get_package_dependencies: returning result: {}".format(reqs)) + return reqs + + +def get_dep_names_of_package( + package, + keep_version_pins=False, + recursive=False, + verbose=False, + include_build_requirements=False + ): + """ Gets the dependencies from the package in the given folder, + then attempts to deduce the actual package name resulting + from each dependency line, stripping away everything else. + """ + + # First, obtain the dependencies: + dependencies = get_package_dependencies( + package, recursive=recursive, verbose=verbose, + include_build_requirements=include_build_requirements, + ) + if verbose: + print("get_dep_names_of_package_folder: " + + "processing dependency list to names: " + + str(dependencies)) + + # Transform dependencies to their stripped down names: + # (they can still have version pins/restrictions, conditionals, ...) 
+ dependency_names = set() + for dep in dependencies: + # If we are supposed to keep exact version pins, extract first: + pin_to_append = "" + if keep_version_pins and "(==" in dep and dep.endswith(")"): + # This is a dependency of the format: 'pkg (==1.0)' + pin_to_append = "==" + dep.rpartition("==")[2][:-1] + elif keep_version_pins and "==" in dep and not dep.endswith(")"): + # This is a dependency of the format: 'pkg==1.0' + pin_to_append = "==" + dep.rpartition("==")[2] + # Now get true (and e.g. case-corrected) dependency name: + dep_name = get_package_name(dep) + pin_to_append + dependency_names.add(dep_name) + return dependency_names diff --git a/pythonforandroid/toolchain.py b/pythonforandroid/toolchain.py index 99680d83de..ce2073b177 100644 --- a/pythonforandroid/toolchain.py +++ b/pythonforandroid/toolchain.py @@ -9,6 +9,7 @@ from __future__ import print_function from os import environ from pythonforandroid import __version__ +from pythonforandroid.pythonpackage import get_dep_names_of_package from pythonforandroid.recommendations import ( RECOMMENDED_NDK_API, RECOMMENDED_TARGET_API) from pythonforandroid.util import BuildInterruptingException, handle_build_exception @@ -179,6 +180,7 @@ def build_dist_from_args(ctx, dist, args): ctx, dist.recipes, bs, blacklist=blacklist )) + assert set(build_order).intersection(set(python_modules)) == set() ctx.recipe_build_order = build_order ctx.python_modules = python_modules @@ -197,7 +199,9 @@ def build_dist_from_args(ctx, dist, args): if dist.needs_build: ctx.prepare_dist(ctx.dist_name) - build_recipes(build_order, python_modules, ctx) + build_recipes(build_order, python_modules, ctx, args.private, + ignore_project_setup_py=args.ignore_setup_py, + ) ctx.bootstrap.run_distribute() @@ -307,7 +311,15 @@ def __init__(self): generic_parser.add_argument( '--requirements', help=('Dependencies of your app, should be recipe names or ' - 'Python modules'), + 'Python modules. 
NOT NECESSARY if you are using '
+              'Python 3 with --use-setup-py'),
+        default='')
+
+    generic_parser.add_argument(
+        '--recipe-blacklist',
+        help=('Blacklist an internal recipe from use. Allows '
+              'disabling Python 3 core modules to save size'),
+        dest="recipe_blacklist",
         default='')
 
     generic_parser.add_argument(
@@ -467,10 +479,33 @@ def add_parser(subparsers, *args, **kwargs):
         subparsers,
         'apk', help='Build an APK',
         parents=[generic_parser])
+    # This is actually an internal argument of the build.py
+    # (see pythonforandroid/bootstraps/common/build/build.py).
+    # However, it is also needed before the distribution is finally
+    # assembled for locating the setup.py / other build systems, which
+    # is why we also add it here:
+    parser_apk.add_argument(
+        '--private', dest='private',
+        help='the directory with the app source code files' +
+             ' (containing your main.py entrypoint)',
+        required=False, default=None)
     parser_apk.add_argument(
         '--release', dest='build_mode', action='store_const',
         const='release', default='debug',
         help='Build the PARSER_APK. in Release mode')
+    parser_apk.add_argument(
+        '--use-setup-py', dest="use_setup_py",
+        action='store_true', default=False,
+        help="Process the setup.py of a project if present. " +
+             "(Experimental!)")
+    parser_apk.add_argument(
+        '--ignore-setup-py', dest="ignore_setup_py",
+        action='store_true', default=False,
+        help="Don't run the setup.py of a project if present. " +
+             "This may be required if the setup.py is not " +
+             "designed to work inside p4a (e.g. 
by installing " +
+             "dependencies that won't work or aren't desired " +
+             "on Android)")
     parser_apk.add_argument(
         '--keystore', dest='keystore', action='store', default=None,
         help=('Keystore for JAR signing key, will use jarsigner '
@@ -530,6 +565,11 @@ def add_parser(subparsers, *args, **kwargs):
         args, unknown = parser.parse_known_args(sys.argv[1:])
         args.unknown_args = unknown
 
+        if hasattr(args, "private") and args.private is not None:
+            # Pass this value on to the internal bootstrap build.py:
+            args.unknown_args += ["--private", args.private]
+        if args.ignore_setup_py:
+            args.use_setup_py = False
+
         self.args = args
@@ -542,10 +582,54 @@ def add_parser(subparsers, *args, **kwargs):
         if args.debug:
             logger.setLevel(logging.DEBUG)
 
+        self.ctx = Context()
+        self.ctx.use_setup_py = args.use_setup_py
+
+        have_setup_py_or_similar = False
+        if getattr(args, "private", None) is not None:
+            project_dir = getattr(args, "private")
+            if (os.path.exists(os.path.join(project_dir, "setup.py")) or
+                    os.path.exists(os.path.join(project_dir,
+                                                "pyproject.toml"))):
+                have_setup_py_or_similar = True
+
         # Process requirements and put version in environ
         if hasattr(args, 'requirements'):
             requirements = []
+
+            # Add dependencies from setup.py, but only if they are recipes
+            # (because otherwise, setup.py itself will install them later)
+            if (have_setup_py_or_similar and
+                    getattr(args, "use_setup_py", False)):
+                try:
+                    info("Analyzing package dependencies. 
MAY TAKE A WHILE.") + # Get all the dependencies corresponding to a recipe: + dependencies = [ + dep.lower() for dep in + get_dep_names_of_package( + args.private, + keep_version_pins=True, + recursive=True, + verbose=True, + ) + ] + info("Dependencies obtained: " + str(dependencies)) + all_recipes = [ + recipe.lower() for recipe in + set(Recipe.list_recipes(self.ctx)) + ] + dependencies = set(dependencies).intersection( + set(all_recipes) + ) + # Add dependencies to argument list: + if len(dependencies) > 0: + if len(args.requirements) > 0: + args.requirements += u"," + args.requirements += u",".join(dependencies) + except ValueError: + # Not a python package, apparently. + pass + # Parse --requirements argument list: for requirement in split_argument_list(args.requirements): if "==" in requirement: @@ -558,7 +642,6 @@ def add_parser(subparsers, *args, **kwargs): self.warn_on_deprecated_args(args) - self.ctx = Context() self.storage_dir = args.storage_dir self.ctx.setup_dirs(self.storage_dir) self.sdk_dir = args.sdk_dir @@ -588,6 +671,25 @@ def warn_on_deprecated_args(self, args): Print warning messages for any deprecated arguments that were passed. """ + # Output warning if setup.py is present and neither --ignore-setup-py + # nor --use-setup-py was specified. 
+ if getattr(args, "private", None) is not None and \ + (os.path.exists(os.path.join(args.private, "setup.py")) or + os.path.exists(os.path.join(args.private, "pyproject.toml")) + ): + if not getattr(args, "use_setup_py", False) and \ + not getattr(args, "ignore_setup_py", False): + warning(" **** FUTURE BEHAVIOR CHANGE WARNING ****") + warning("Your project appears to contain a setup.py file.") + warning("Currently, these are ignored by default.") + warning("This will CHANGE in an upcoming version!") + warning("") + warning("To ensure your setup.py is ignored, please specify:") + warning(" --ignore-setup-py") + warning("") + warning("To enable what will some day be the default, specify:") + warning(" --use-setup-py") + # NDK version is now determined automatically if args.ndk_version is not None: warning('--ndk-version is deprecated and no longer necessary, ' diff --git a/setup.py b/setup.py index 558dcb2f27..f045f0d47f 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,10 @@ # https://github.com/kivy/buildozer/issues/722 install_reqs = [ 'appdirs', 'colorama>=0.3.3', 'jinja2', 'six', - 'enum34; python_version<"3.4"', 'sh>=1.10; sys_platform!="nt"' + 'enum34; python_version<"3.4"', 'sh>=1.10; sys_platform!="nt"', + 'pep517', 'pytoml', 'virtualenv' ] +# (pep517, pytoml and virtualenv are used by pythonpackage.py) # By specifying every file manually, package_data will be able to # include them in binary distributions. Note that we have to add diff --git a/tests/test_pythonpackage.py b/tests/test_pythonpackage.py new file mode 100644 index 0000000000..a725646e3a --- /dev/null +++ b/tests/test_pythonpackage.py @@ -0,0 +1,111 @@ +""" +THESE TESTS DON'T RUN IN TRAVIS (takes too long!!) +ONLY THE BASIC ONES IN test_pythonpackage_basic.py DO. 
+ +(This file basically covers all tests for any of the +functions that aren't already part of the basic +test set) +""" + +import os +import shutil +import tempfile + +from pythonforandroid.pythonpackage import ( + _extract_info_from_package, + extract_metainfo_files_from_package, + get_package_as_folder, + get_package_dependencies, +) + + +def local_repo_folder(): + return os.path.abspath(os.path.join( + os.path.dirname(__file__), ".." + )) + + +def test_get_package_dependencies(): + # TEST 1 from source code folder: + deps_nonrecursive = get_package_dependencies( + local_repo_folder(), recursive=False + ) + deps_recursive = get_package_dependencies( + local_repo_folder(), recursive=True + ) + # Check that jinja2 is returned as direct dep: + assert len([dep for dep in deps_nonrecursive + if "jinja2" in dep]) > 0 + # Check that MarkupSafe is returned as indirect dep of jinja2: + assert [ + dep for dep in deps_recursive + if "MarkupSafe" in dep + ] + # Check setuptools not being in non-recursive deps: + # (It will be in recursive ones due to p4a's pep517 dependency) + assert "setuptools" not in deps_nonrecursive + # Check setuptools is present in non-recursive deps, + # if we also add build requirements: + assert "setuptools" in get_package_dependencies( + local_repo_folder(), recursive=False, + include_build_requirements=True, + ) + + # TEST 2 from external ref: + # Check that jinja2 is returned as direct dep: + assert len([dep for dep in get_package_dependencies("python-for-android") + if "jinja2" in dep]) > 0 + # Check that MarkupSafe is returned as indirect dep of jinja2: + assert [ + dep for dep in get_package_dependencies( + "python-for-android", recursive=True + ) + if "MarkupSafe" in dep + ] + + +def test_extract_metainfo_files_from_package(): + # TEST 1 from external ref: + files_dir = tempfile.mkdtemp() + try: + extract_metainfo_files_from_package("python-for-android", + files_dir, debug=True) + assert os.path.exists(os.path.join(files_dir, "METADATA")) + 
finally: + shutil.rmtree(files_dir) + + # TEST 2 from local folder: + files_dir = tempfile.mkdtemp() + try: + extract_metainfo_files_from_package(local_repo_folder(), + files_dir, debug=True) + assert os.path.exists(os.path.join(files_dir, "METADATA")) + finally: + shutil.rmtree(files_dir) + + +def test_get_package_as_folder(): + # WARNING !!! This function behaves DIFFERENTLY if the requested package + # has a wheel available vs a source package. What we're getting is + # essentially what pip also would fetch, but this can obviously CHANGE + # depending on what is happening/available on PyPI. + # + # Therefore, this test doesn't really go in-depth. + (obtained_type, obtained_path) = \ + get_package_as_folder("python-for-android") + try: + assert obtained_type in {"source", "wheel"} + assert os.path.isdir(obtained_path) + finally: + # Try to ensure cleanup: + shutil.rmtree(obtained_path) + + +def test__extract_info_from_package(): + # This is indirectly already tested a lot through get_package_name() + # and get_package_dependencies(), so we'll just do one basic test: + + assert _extract_info_from_package( + local_repo_folder(), + extract_type="name" + ) == "python-for-android" diff --git a/tests/test_pythonpackage_basic.py b/tests/test_pythonpackage_basic.py new file mode 100644 index 0000000000..812ae70397 --- /dev/null +++ b/tests/test_pythonpackage_basic.py @@ -0,0 +1,310 @@ +""" +ONLY BASIC TEST SET. The additional ones are in test_pythonpackage.py. + +These are in a separate file because these were picked to run in travis, +while the other additional ones aren't (for build time reasons). 
+""" + +import mock +import os +import pytest +import shutil +import sys +import subprocess +import tempfile +import textwrap + +from pythonforandroid.pythonpackage import ( + _extract_info_from_package, + get_dep_names_of_package, + get_package_name, + _get_system_python_executable, + is_filesystem_path, + parse_as_folder_reference, + transform_dep_for_pip, +) + + +def local_repo_folder(): + return os.path.abspath(os.path.join( + os.path.dirname(__file__), ".." + )) + + +def fake_metadata_extract(dep_name, output_folder, debug=False): + # Helper function to write out fake metadata. + with open(os.path.join(output_folder, "METADATA"), "w") as f: + f.write(textwrap.dedent("""\ + Metadata-Version: 2.1 + Name: testpackage + Version: 0.1 + Requires-Dist: testpkg + Requires-Dist: testpkg2 + + Lorem Ipsum""" + )) + + +def test__extract_info_from_package(): + import pythonforandroid.pythonpackage # noqa + with mock.patch("pythonforandroid.pythonpackage." + "extract_metainfo_files_from_package", + fake_metadata_extract): + assert _extract_info_from_package( + "whatever", extract_type="name" + ) == "testpackage" + assert set(_extract_info_from_package( + "whatever", extract_type="dependencies" + )) == {"testpkg", "testpkg2"} + + +def test_get_package_name(): + # TEST 1 from external ref + with mock.patch("pythonforandroid.pythonpackage." 
+ "extract_metainfo_files_from_package", + fake_metadata_extract): + assert get_package_name("TeStPackaGe") == "testpackage" + + # TEST 2 from a local folder, for which we'll create a fake package: + temp_d = tempfile.mkdtemp(prefix="p4a-pythonpackage-test-tmp-") + try: + with open(os.path.join(temp_d, "setup.py"), "w") as f: + f.write(textwrap.dedent("""\ + from setuptools import setup + setup(name="testpackage") + """ + )) + pkg_name = get_package_name(temp_d) + assert pkg_name == "testpackage" + finally: + shutil.rmtree(temp_d) + + +def test_get_dep_names_of_package(): + # TEST 1 from external ref: + # Check that colorama is returned without the install condition when + # just getting the names (it has a "; ..." conditional originally): + dep_names = get_dep_names_of_package("python-for-android") + assert "colorama" in dep_names + assert "setuptools" not in dep_names + dep_names = get_dep_names_of_package("python-for-android", + include_build_requirements=True) + assert "setuptools" in dep_names + + # TEST 2 from local folder: + assert "colorama" in get_dep_names_of_package(local_repo_folder()) + + # Now test that exact version pins are kept, but others aren't: + test_fake_package = tempfile.mkdtemp() + try: + with open(os.path.join(test_fake_package, "setup.py"), "w") as f: + f.write(textwrap.dedent("""\ + from setuptools import setup + + setup(name='fakeproject', + description='fake for testing', + install_requires=['buildozer==0.39', + 'python-for-android>=0.5.1'], + ) + """)) + # See that we get the deps with the exact version pin kept but + # the other version restriction gone: + assert set(get_dep_names_of_package( + test_fake_package, recursive=False, + keep_version_pins=True, verbose=True + )) == {"buildozer==0.39", "python-for-android"} + + # Make sure we also can get the fully cleaned up variant: + assert set(get_dep_names_of_package( + test_fake_package, recursive=False, + keep_version_pins=False, verbose=True + )) == {"buildozer", 
"python-for-android"} + + # Test with build requirements included: + dep_names = get_dep_names_of_package( + test_fake_package, recursive=False, + keep_version_pins=False, verbose=True, + include_build_requirements=True + ) + assert len( + {"buildozer", "python-for-android", "setuptools"}.intersection( + dep_names + ) + ) == 3 # all three must be included + finally: + shutil.rmtree(test_fake_package) + + +def test_transform_dep_for_pip(): + transformed = transform_dep_for_pip( + "python-for-android @ https://github.com/kivy/" + "python-for-android/archive/master.zip" + ) + expected = ( + "https://github.com/kivy/python-for-android/archive/master.zip" + "#egg-name=python-for-android" + ) + assert transformed == expected + + +def test_is_filesystem_path(): + assert is_filesystem_path("/some/test") + assert not is_filesystem_path("https://blubb") + assert not is_filesystem_path("test @ bla") + assert is_filesystem_path("/abc/c@d") + assert not is_filesystem_path("https://user:pw@host/") + + +def test_parse_as_folder_reference(): + assert parse_as_folder_reference("file:///a%20test") == "/a test" + assert parse_as_folder_reference("https://github.com") is None + assert parse_as_folder_reference("/a/folder") == "/a/folder" + assert parse_as_folder_reference("test @ /abc") == "/abc" + assert parse_as_folder_reference("test @ https://bla") is None + + +class TestGetSystemPythonExecutable(): + """ This contains all tests for _get_system_python_executable(). + + ULTRA IMPORTANT THING TO UNDERSTAND: (if you touch this) + + This code runs things with other python interpreters NOT in the tox + environment/virtualenv. + E.g. _get_system_python_executable() is outside in the regular + host environment! That also means all dependencies can be possibly + not present! + + This is kind of absurd that we need this to run the test at all, + but we can't test this inside tox's virtualenv: + """ + + def test_basic(self): + # Tests function inside tox env with no further special setup. 
+ + # Get system-wide python bin: + pybin = _get_system_python_executable() + + # The python binary needs to match our major version to be correct: + pyversion = subprocess.check_output([ + pybin, "-c", "import sys; print(sys.version)" + ], stderr=subprocess.STDOUT).decode("utf-8", "replace") + assert pyversion.strip() == sys.version.strip() + + def run__get_system_python_executable(self, pybin): + """ Helper function to run our function. + + We want to see what _get_system_python_executable() does given + a specific python, so we need to make it import it and run it, + with that TARGET python, which this function does. + """ + cmd = [ + pybin, + "-c", + "import importlib\n" + "import json\n" + "import os\n" + "import sys\n" + "sys.path = [os.path.dirname(sys.argv[1])] + sys.path\n" + "m = importlib.import_module(\n" + " os.path.basename(sys.argv[1]).partition('.')[0]\n" + ")\n" + "print(m._get_system_python_executable())", + os.path.join(os.path.dirname(__file__), "..", + "pythonforandroid", "pythonpackage.py"), + ] + # Actual call to python: + try: + return subprocess.check_output( + cmd, stderr=subprocess.STDOUT + ).decode("utf-8", "replace").strip() + except subprocess.CalledProcessError as e: + raise RuntimeError("call failed, with output: " + str(e.output)) + + def test_systemwide_python(self): + # Get system-wide python bin seen from here first: + pybin = _get_system_python_executable() + # (this call was not a test, we really just need the path here) + + # Check that in system-wide python, the system-wide python is returned: + # IMPORTANT: understand that this runs OUTSIDE of any virtualenv. + try: + p1 = os.path.normpath( + self.run__get_system_python_executable(pybin) + ) + p2 = os.path.normpath(pybin) + assert p1 == p2 + except RuntimeError as e: + if "pep517" in str(e.args): + # System python probably doesn't have pep517 available! 
+ # (remember this is not in a virtualenv) + # Not much we can do in that case since pythonpackage needs it, + # so we'll skip this particular check. + pass + else: + raise + + def test_virtualenv(self): + """ Verifies that _get_system_python_executable() works correctly + if called with a python binary as found inside a virtualenv. + """ + + # Get system-wide python bin seen from here first: + pybin = _get_system_python_executable() + # (this call was not a test, we really just need the path here) + + test_dir = tempfile.mkdtemp() + try: + # Check that in a virtualenv, the system-wide python is returned: + subprocess.check_output([ + pybin, "-m", "virtualenv", + "--python=" + str(sys.executable), + "--", + os.path.join(test_dir, "virtualenv") + ]) + subprocess.check_output([ + os.path.join(test_dir, "virtualenv", "bin", "pip"), + "install", "-U", "pip" + ]) + subprocess.check_output([ + os.path.join(test_dir, "virtualenv", "bin", "pip"), + "install", "-U", "pep517" + ]) + sys_python_path = self.run__get_system_python_executable( + os.path.join(test_dir, "virtualenv", "bin", "python") + ) + assert os.path.normpath(sys_python_path) == os.path.normpath(pybin) + finally: + shutil.rmtree(test_dir) + + @pytest.mark.skipif(int(sys.version.partition(".")[0]) < 3, + reason="venv is python 3 only") + def test_venv(self): + """ Verifies that _get_system_python_executable() works correctly + in a 'venv' (Python 3 only feature). 
+ """ + + # Get system-wide python bin seen from here first: + pybin = _get_system_python_executable() + # (this call was not a test, we really just need the path here) + + test_dir = tempfile.mkdtemp() + try: + # Check that in a venv/pyvenv, the system-wide python is returned: + subprocess.check_output([ + pybin, "-m", "venv", "--", + os.path.join(test_dir, "venv") + ]) + subprocess.check_output([ + os.path.join(test_dir, "venv", "bin", "pip"), + "install", "-U", "pip" + ]) + subprocess.check_output([ + os.path.join(test_dir, "venv", "bin", "pip"), + "install", "-U", "pep517" + ]) + sys_python_path = self.run__get_system_python_executable( + os.path.join(test_dir, "venv", "bin", "python") + ) + assert os.path.normpath(sys_python_path) == os.path.normpath(pybin) + finally: + shutil.rmtree(test_dir) diff --git a/tox.ini b/tox.ini index 74cdc3940b..47ad3e029b 100644 --- a/tox.ini +++ b/tox.ini @@ -6,6 +6,7 @@ basepython = python3 deps = mock pytest + virtualenv # makes it possible to override pytest args, e.g. # tox -- tests/test_graph.py commands = pytest {posargs:tests/}